[ 504.197172] env[68217]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=68217) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 504.197502] env[68217]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=68217) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 504.197538] env[68217]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=68217) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 504.197874] env[68217]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 504.295297] env[68217]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=68217) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:349}}
[ 504.305414] env[68217]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=68217) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:372}}
[ 504.348614] env[68217]: INFO oslo_service.periodic_task [-] Skipping periodic task _heal_instance_info_cache because its interval is negative
[ 504.906754] env[68217]: INFO nova.virt.driver [None req-a34923c1-9619-4e15-bd3e-79eec9e17788 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 504.979491] env[68217]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 504.979650] env[68217]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 504.979818] env[68217]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=68217) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 508.238553] env[68217]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-3d343273-aacc-4098-9a7c-f72130b69cb1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 508.255159] env[68217]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=68217) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 508.255296] env[68217]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-6c5f8f53-d50c-451f-864c-d33f3e19c66e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 508.287602] env[68217]: INFO oslo_vmware.api [-] Successfully established new session; session ID is ec33a.
[ 508.287753] env[68217]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.308s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 508.288351] env[68217]: INFO nova.virt.vmwareapi.driver [None req-a34923c1-9619-4e15-bd3e-79eec9e17788 None None] VMware vCenter version: 7.0.3
[ 508.292062] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e268df8-263c-496a-8299-256689e610dc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 508.313996] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298a4366-9426-40dc-b63e-09d1268eb975 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 508.320103] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5074a960-d786-41a0-b266-0ce55b968493 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 508.326850] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa6810f-27ce-4a0b-b7f9-4a52f414b0eb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 508.340067] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82fd79b-0bf6-47f8-b0bf-a2f1a09eedb9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 508.346405] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24bf5525-cde3-4ee4-a2b9-b553b6201ddd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 508.377218] env[68217]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-e4386eae-53e1-4dcb-999c-c2988e9a54eb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 508.382623] env[68217]: DEBUG nova.virt.vmwareapi.driver [None req-a34923c1-9619-4e15-bd3e-79eec9e17788 None None] Extension org.openstack.compute already exists. {{(pid=68217) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:228}}
[ 508.385334] env[68217]: INFO nova.compute.provider_config [None req-a34923c1-9619-4e15-bd3e-79eec9e17788 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 508.889331] env[68217]: DEBUG nova.context [None req-a34923c1-9619-4e15-bd3e-79eec9e17788 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),b15110cb-0210-4aa2-a11e-16f1db2072a3(cell1) {{(pid=68217) load_cells /opt/stack/nova/nova/context.py:464}}
[ 508.891418] env[68217]: DEBUG oslo_concurrency.lockutils [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 508.891638] env[68217]: DEBUG oslo_concurrency.lockutils [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 508.892376] env[68217]: DEBUG oslo_concurrency.lockutils [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 508.892778] env[68217]: DEBUG oslo_concurrency.lockutils [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] Acquiring lock "b15110cb-0210-4aa2-a11e-16f1db2072a3" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}}
[ 508.892967] env[68217]: DEBUG oslo_concurrency.lockutils [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] Lock "b15110cb-0210-4aa2-a11e-16f1db2072a3" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}}
[ 508.893993] env[68217]: DEBUG oslo_concurrency.lockutils [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] Lock "b15110cb-0210-4aa2-a11e-16f1db2072a3" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}}
[ 508.914931] env[68217]: INFO dbcounter [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] Registered counter for database nova_cell0
[ 508.922888] env[68217]: INFO dbcounter [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] Registered counter for database nova_cell1
[ 508.926117] env[68217]: DEBUG oslo_db.sqlalchemy.engines [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68217) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 508.926474] env[68217]: DEBUG oslo_db.sqlalchemy.engines [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68217) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:397}}
[ 508.931661] env[68217]: ERROR nova.db.main.api [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 508.931661] env[68217]: result = function(*args, **kwargs)
[ 508.931661] env[68217]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 508.931661] env[68217]: return func(*args, **kwargs)
[ 508.931661] env[68217]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 508.931661] env[68217]: result = fn(*args, **kwargs)
[ 508.931661] env[68217]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 508.931661] env[68217]: return f(*args, **kwargs)
[ 508.931661] env[68217]: File "/opt/stack/nova/nova/objects/service.py", line 560, in _db_service_get_minimum_version
[ 508.931661] env[68217]: return db.service_get_minimum_version(context, binaries)
[ 508.931661] env[68217]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 508.931661] env[68217]: _check_db_access()
[ 508.931661] env[68217]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 508.931661] env[68217]: stacktrace = ''.join(traceback.format_stack())
[ 508.931661] env[68217]:
[ 508.932517] env[68217]: ERROR nova.db.main.api [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 272, in main
[ 508.932517] env[68217]: result = function(*args, **kwargs)
[ 508.932517] env[68217]: File "/opt/stack/nova/nova/utils.py", line 663, in context_wrapper
[ 508.932517] env[68217]: return func(*args, **kwargs)
[ 508.932517] env[68217]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 508.932517] env[68217]: result = fn(*args, **kwargs)
[ 508.932517] env[68217]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 508.932517] env[68217]: return f(*args, **kwargs)
[ 508.932517] env[68217]: File "/opt/stack/nova/nova/objects/service.py", line 560, in _db_service_get_minimum_version
[ 508.932517] env[68217]: return db.service_get_minimum_version(context, binaries)
[ 508.932517] env[68217]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 508.932517] env[68217]: _check_db_access()
[ 508.932517] env[68217]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 508.932517] env[68217]: stacktrace = ''.join(traceback.format_stack())
[ 508.932517] env[68217]:
[ 508.932896] env[68217]: WARNING nova.objects.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] Failed to get minimum service version for cell b15110cb-0210-4aa2-a11e-16f1db2072a3
[ 508.933046] env[68217]: WARNING nova.objects.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 508.933478] env[68217]: DEBUG oslo_concurrency.lockutils [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] Acquiring lock "singleton_lock" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
[ 508.933636] env[68217]: DEBUG oslo_concurrency.lockutils [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] Acquired lock "singleton_lock" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [
508.933869] env[68217]: DEBUG oslo_concurrency.lockutils [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] Releasing lock "singleton_lock" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 508.934213] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] Full set of CONF: {{(pid=68217) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/service.py:357}} [ 508.934359] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ******************************************************************************** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2804}} [ 508.934484] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] Configuration options gathered from: {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2805}} [ 508.934616] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}} [ 508.934808] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}} [ 508.934928] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ================================================================================ {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}} [ 508.935162] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] allow_resize_to_same_host = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.935332] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] arq_binding_timeout = 300 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.935458] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] backdoor_port = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.935581] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] backdoor_socket = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.935740] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] block_device_allocate_retries = 60 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.935897] env[68217]: DEBUG oslo_service.backend.eventlet.service [None 
req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] block_device_allocate_retries_interval = 3 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.936077] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cert = self.pem {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.936250] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.936414] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] compute_monitors = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.936579] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] config_dir = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.936745] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] config_drive_format = iso9660 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.936877] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.937050] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] config_source = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.937221] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] console_host = devstack {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.937384] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] control_exchange = nova {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.937538] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cpu_allocation_ratio = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.937696] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] daemon = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.937858] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] debug = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.938022] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] default_access_ip_network_name = None {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.938190] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] default_availability_zone = nova {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.938344] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] default_ephemeral_format = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.938499] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] default_green_pool_size = 1000 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.938737] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.938900] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] default_schedule_zone = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.939102] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] disk_allocation_ratio = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.939276] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] enable_new_services = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.939453] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] enabled_apis = ['osapi_compute'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.939615] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] enabled_ssl_apis = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.939775] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] flat_injected = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.939933] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] force_config_drive = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.940140] env[68217]: DEBUG 
oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] force_raw_images = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.940321] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] graceful_shutdown_timeout = 5 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.940485] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] heal_instance_info_cache_interval = -1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.940709] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] host = cpu-1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.940881] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] initial_cpu_allocation_ratio = 4.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.941055] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] initial_disk_allocation_ratio = 1.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.941219] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] initial_ram_allocation_ratio = 1.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.941443] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.941608] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] instance_build_timeout = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.941763] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] instance_delete_interval = 300 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.941927] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] instance_format = [instance: %(uuid)s] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.942107] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] instance_name_template = instance-%08x {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.942272] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] instance_usage_audit = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.942443] env[68217]: DEBUG oslo_service.backend.eventlet.service [None 
req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] instance_usage_audit_period = month {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.942606] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.942769] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] instances_path = /opt/stack/data/nova/instances {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.942932] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] internal_service_availability_zone = internal {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.943100] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] key = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.943265] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] live_migration_retry_count = 30 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.943438] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] log_color = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.943606] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] log_config_append = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.943768] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.943925] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] log_dir = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.944092] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] log_file = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.944223] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] log_options = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.944380] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] log_rotate_interval = 1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.944544] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] log_rotate_interval_type = days {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.944706] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] log_rotation_type = none {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.944831] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.944954] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.945138] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.945303] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.945427] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.945585] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] long_rpc_timeout = 1800 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.945741] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] max_concurrent_builds = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.945894] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] max_concurrent_live_migrations = 1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.946062] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] max_concurrent_snapshots = 5 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.946226] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] max_local_block_devices = 3 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.946382] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] max_logfile_count = 30 {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.946536] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] max_logfile_size_mb = 200 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.946688] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] maximum_instance_delete_attempts = 5 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.946852] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] metadata_listen = 0.0.0.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.947025] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] metadata_listen_port = 8775 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.947194] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] metadata_workers = 2 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.947353] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] migrate_max_retries = -1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.947514] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] mkisofs_cmd = genisoimage {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.947717] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] my_block_storage_ip = 10.180.1.21 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.947847] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] my_ip = 10.180.1.21 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.948060] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] my_shared_fs_storage_ip = 10.180.1.21 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.948226] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] network_allocate_retries = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.948402] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.948564] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] osapi_compute_listen = 0.0.0.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.948723] env[68217]: DEBUG 
oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] osapi_compute_listen_port = 8774 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.948885] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] osapi_compute_unique_server_name_scope = {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.949083] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] osapi_compute_workers = 2 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.949254] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] password_length = 12 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.949412] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] periodic_enable = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.949570] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] periodic_fuzzy_delay = 60 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.949730] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] pointer_model = usbtablet {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.949890] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] preallocate_images = none {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.950082] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] publish_errors = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.950225] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] pybasedir = /opt/stack/nova {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.950383] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ram_allocation_ratio = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.950539] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] rate_limit_burst = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.950703] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] rate_limit_except_level = CRITICAL {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.950856] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] rate_limit_interval = 0 {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.951023] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] reboot_timeout = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.951183] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] reclaim_instance_interval = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.951335] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] record = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.951498] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] reimage_timeout_per_gb = 60 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.951658] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] report_interval = 120 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.951814] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] rescue_timeout = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.951970] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] reserved_host_cpus = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.952138] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] reserved_host_disk_mb = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.952295] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] reserved_host_memory_mb = 512 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.952451] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] reserved_huge_pages = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.952607] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] resize_confirm_window = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.952762] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] resize_fs_using_block_device = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.952918] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] resume_guests_state_on_host_boot = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.953099] env[68217]: DEBUG oslo_service.backend.eventlet.service [None 
req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.953265] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] rpc_response_timeout = 60 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.953429] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] run_external_periodic_tasks = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.953592] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] running_deleted_instance_action = reap {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.953751] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] running_deleted_instance_poll_interval = 1800 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.953908] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] running_deleted_instance_timeout = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.954080] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] scheduler_instance_sync_interval = 120 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.954249] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] service_down_time = 720 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.954415] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] servicegroup_driver = db {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.954570] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] shell_completion = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.954726] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] shelved_offload_time = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.954884] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] shelved_poll_interval = 3600 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.955071] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] shutdown_timeout = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.955240] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] source_is_ipv6 = False {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.955402] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ssl_only = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.955652] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.955817] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] sync_power_state_interval = 600 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.955975] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] sync_power_state_pool_size = 1000 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.956158] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] syslog_log_facility = LOG_USER {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.956316] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] tempdir = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.956471] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] timeout_nbd = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.956634] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] transport_url = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.956791] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] update_resources_interval = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.956948] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] use_cow_images = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.957111] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] use_journal = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.957270] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] use_json = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.957422] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] use_rootwrap_daemon = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.957577] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] 
use_stderr = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.957732] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] use_syslog = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.957886] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vcpu_pin_set = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.958060] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vif_plugging_is_fatal = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.958232] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vif_plugging_timeout = 300 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.958398] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] virt_mkfs = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.958557] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] volume_usage_poll_interval = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.958712] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] watch_log_file = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.958873] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] web = /usr/share/spice-html5 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2817}} [ 508.959088] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.959270] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.959435] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] os_brick.wait_mpath_device_interval = 1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.959607] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_concurrency.disable_process_locking = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.960231] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.960435] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.960612] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.960789] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_metrics.metrics_process_name = {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.960966] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.961156] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.961344] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.auth_strategy = keystone {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.961512] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.compute_link_prefix = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.961690] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.961866] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.dhcp_domain = novalocal {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.962046] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.enable_instance_password = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.962216] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.glance_link_prefix = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.962378] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.962563] env[68217]: DEBUG oslo_service.backend.eventlet.service [None 
req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.instance_list_cells_batch_strategy = distributed {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.962709] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.instance_list_per_project_cells = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.962872] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.list_records_by_skipping_down_cells = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.963043] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.local_metadata_per_cell = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.963215] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.max_limit = 1000 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.963377] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.metadata_cache_expiration = 15 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.963546] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.neutron_default_tenant_id = default {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.963713] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.response_validation = warn {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.963880] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.use_neutron_default_nets = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.964059] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.964228] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.vendordata_dynamic_failure_fatal = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.964396] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.964563] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.vendordata_dynamic_ssl_certfile = {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.964732] env[68217]: DEBUG oslo_service.backend.eventlet.service [None 
req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.vendordata_dynamic_targets = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.964893] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.vendordata_jsonfile_path = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.965145] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api.vendordata_providers = ['StaticJSON'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.965362] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.backend = dogpile.cache.memcached {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.965533] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.backend_argument = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.965694] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.backend_expiration_time = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.965863] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.config_prefix = cache.oslo {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.966042] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.dead_timeout = 60.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.966212] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.debug_cache_backend = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.966374] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.enable_retry_client = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.966535] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.enable_socket_keepalive = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.966701] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.enabled = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.966866] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.enforce_fips_mode = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.967036] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.expiration_time = 600 
{{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.967206] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.hashclient_retry_attempts = 2 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.967368] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.hashclient_retry_delay = 1.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.967525] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.memcache_dead_retry = 300 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.967681] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.memcache_password = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.967846] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.968015] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.968190] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.memcache_pool_maxsize = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.968351] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.memcache_pool_unused_timeout = 60 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.968511] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.memcache_sasl_enabled = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.968686] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.memcache_servers = ['localhost:11211'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.968849] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.memcache_socket_timeout = 1.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.969043] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.memcache_username = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.969218] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.proxies = [] {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.969391] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.redis_db = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.969553] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.redis_password = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.969723] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.redis_sentinel_service_name = mymaster {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.969901] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.970125] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.redis_server = localhost:6379 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.970319] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.redis_socket_timeout = 1.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.970483] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.redis_username = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.970643] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.retry_attempts = 2 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.970806] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.retry_delay = 0.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.970969] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.socket_keepalive_count = 1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.971161] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.socket_keepalive_idle = 1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.971304] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.socket_keepalive_interval = 1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.971466] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.tls_allowed_ciphers = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.971622] env[68217]: DEBUG 
oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.tls_cafile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.971779] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.tls_certfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.971942] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.tls_enabled = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.972168] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cache.tls_keyfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.972351] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cinder.auth_section = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.972527] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cinder.auth_type = password {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.972688] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cinder.cafile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.972862] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cinder.catalog_info = volumev3::publicURL {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.973034] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cinder.certfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.973207] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cinder.collect_timing = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.973369] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cinder.cross_az_attach = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.973526] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cinder.debug = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.973692] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cinder.endpoint_template = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.973847] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cinder.http_retries = 3 {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.974013] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cinder.insecure = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.974175] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cinder.keyfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.974343] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cinder.os_region_name = RegionOne {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.974503] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cinder.split_loggers = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.974658] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cinder.timeout = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.974827] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.974985] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] compute.cpu_dedicated_set = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.975158] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] compute.cpu_shared_set = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.975322] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] compute.image_type_exclude_list = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.975486] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] compute.live_migration_wait_for_vif_plug = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.975649] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] compute.max_concurrent_disk_ops = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.975817] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] compute.max_disk_devices_to_attach = -1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.975983] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.976171] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.976338] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] compute.resource_provider_association_refresh = 300 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.976497] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.976657] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] compute.shutdown_retry_interval = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.976835] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.977026] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] conductor.workers = 2 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.977203] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] console.allowed_origins = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.977366] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] console.ssl_ciphers = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.977538] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] console.ssl_minimum_version = default {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.977709] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] consoleauth.enforce_session_timeout = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.977876] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] consoleauth.token_ttl = 600 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.978062] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.cafile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.978226] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.certfile = None {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.978393] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.collect_timing = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.978550] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.connect_retries = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.978704] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.connect_retry_delay = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.978861] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.endpoint_override = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.979051] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.insecure = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.979233] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.keyfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.979397] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.max_version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.979554] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.min_version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.979711] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.region_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.979867] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.retriable_status_codes = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.980068] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.service_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.980267] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.service_type = accelerator {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.980436] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.split_loggers = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.980596] env[68217]: DEBUG oslo_service.backend.eventlet.service 
[None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.status_code_retries = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.980755] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.status_code_retry_delay = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.980913] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.timeout = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.981107] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.981276] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] cyborg.version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.981448] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.asyncio_connection = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.981610] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.asyncio_slave_connection = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.981778] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.backend = sqlalchemy {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.981947] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.connection = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.982155] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.connection_debug = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.982376] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.connection_parameters = {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.982554] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.connection_recycle_time = 3600 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.982719] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.connection_trace = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.982882] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.db_inc_retry_interval = 
True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.983056] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.db_max_retries = 20 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.983230] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.db_max_retry_interval = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.983387] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.db_retry_interval = 1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.983548] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.max_overflow = 50 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.983707] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.max_pool_size = 5 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.983877] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.max_retries = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.984513] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.mysql_sql_mode = TRADITIONAL {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.984513] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.mysql_wsrep_sync_wait = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.984513] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.pool_timeout = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.984513] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.retry_interval = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.984672] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.slave_connection = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.984809] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.sqlite_synchronous = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.984970] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] database.use_db_reconnect = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 
508.985162] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.asyncio_connection = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.985315] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.asyncio_slave_connection = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.985485] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.backend = sqlalchemy {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.985658] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.connection = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.985822] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.connection_debug = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.985991] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.connection_parameters = {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.986173] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.connection_recycle_time = 3600 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.986335] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.connection_trace = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.986496] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.db_inc_retry_interval = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.986655] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.db_max_retries = 20 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.986816] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.db_max_retry_interval = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.987043] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.db_retry_interval = 1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.987160] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.max_overflow = 50 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.987307] env[68217]: DEBUG 
oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.max_pool_size = 5 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.987468] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.max_retries = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.987639] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.987799] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.mysql_wsrep_sync_wait = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.987957] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.pool_timeout = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.988135] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.retry_interval = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.988294] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.slave_connection = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.988454] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] api_database.sqlite_synchronous = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.988626] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] devices.enabled_mdev_types = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.988802] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.988973] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ephemeral_storage_encryption.default_format = luks {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.989190] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ephemeral_storage_encryption.enabled = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.989360] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ephemeral_storage_encryption.key_size = 512 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.989533] env[68217]: DEBUG 
oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.api_servers = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.989691] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.cafile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.989848] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.certfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.990044] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.collect_timing = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.990233] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.connect_retries = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.990394] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.connect_retry_delay = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.990556] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.debug = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.990718] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.default_trusted_certificate_ids = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.990876] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.enable_certificate_validation = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.991055] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.enable_rbd_download = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.991218] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.endpoint_override = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.991382] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.insecure = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.991540] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.keyfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.991696] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.max_version = None {{(pid=68217) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.991849] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.min_version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.992017] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.num_retries = 3 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.992197] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.rbd_ceph_conf = {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.992362] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.rbd_connect_timeout = 5 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.992534] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.rbd_pool = {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.992695] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.rbd_user = {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.992859] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.region_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.993027] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.retriable_status_codes = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.993191] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.service_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.993357] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.service_type = image {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.993520] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.split_loggers = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.993673] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.status_code_retries = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.993826] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.status_code_retry_delay = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.993989] env[68217]: DEBUG oslo_service.backend.eventlet.service [None 
req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.timeout = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.994171] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.994342] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.verify_glance_signatures = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.994499] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] glance.version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.994664] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] guestfs.debug = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.994830] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.auth_section = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.994992] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.auth_type = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.995169] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.cafile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.995326] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.certfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.995489] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.collect_timing = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.995647] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.connect_retries = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.995805] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.connect_retry_delay = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.995962] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.endpoint_override = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.996140] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.insecure = False {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.996301] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.keyfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.996459] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.max_version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.996615] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.min_version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.996772] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.region_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.996928] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.retriable_status_codes = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.997096] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.service_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.997270] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.service_type = shared-file-system {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.997433] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.share_apply_policy_timeout = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.997595] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.split_loggers = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.997751] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.status_code_retries = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.997905] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.status_code_retry_delay = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.998071] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.timeout = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.998256] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.valid_interfaces = ['internal', 'public'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.998414] env[68217]: DEBUG 
oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] manila.version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.998580] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] mks.enabled = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.998927] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.999157] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] image_cache.manager_interval = 2400 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.999340] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] image_cache.precache_concurrency = 1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.999512] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] image_cache.remove_unused_base_images = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.999678] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 508.999844] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.000060] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] image_cache.subdirectory_name = _base {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.000265] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.api_max_retries = 60 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.000434] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.api_retry_interval = 2 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.000594] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.auth_section = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.000754] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.auth_type = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.000915] env[68217]: DEBUG oslo_service.backend.eventlet.service [None 
req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.cafile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.001083] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.certfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.001254] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.collect_timing = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.001415] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.conductor_group = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.001572] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.connect_retries = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.001727] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.connect_retry_delay = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.001881] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.endpoint_override = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.002054] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.insecure = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.002217] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.keyfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.002373] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.max_version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.002527] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.min_version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.002689] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.peer_list = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.002843] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.region_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.002998] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.retriable_status_codes = None {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.003177] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.serial_console_state_timeout = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.003333] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.service_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.003496] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.service_type = baremetal {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.003653] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.shard = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.003814] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.split_loggers = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.003968] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.status_code_retries = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.004139] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.status_code_retry_delay = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.004300] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.timeout = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.004475] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.004635] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ironic.version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.004815] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.004988] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] key_manager.fixed_key = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.005189] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.005352] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican.barbican_api_version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.005511] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican.barbican_endpoint = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.005683] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican.barbican_endpoint_type = public {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.005839] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican.barbican_region_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.005993] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican.cafile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.006167] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican.certfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.006330] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican.collect_timing = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.006490] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican.insecure = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.006646] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican.keyfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.006807] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican.number_of_retries = 60 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.006966] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican.retry_delay = 1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.007142] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican.send_service_user_token = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.007306] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican.split_loggers = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.007463] env[68217]: DEBUG 
oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican.timeout = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.007621] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican.verify_ssl = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.007776] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican.verify_ssl_path = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.007938] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican_service_user.auth_section = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.008114] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican_service_user.auth_type = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.008277] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican_service_user.cafile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.008432] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican_service_user.certfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.008594] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican_service_user.collect_timing = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.008754] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican_service_user.insecure = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.008912] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican_service_user.keyfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.009103] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican_service_user.split_loggers = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.009275] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] barbican_service_user.timeout = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.009442] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vault.approle_role_id = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.009601] env[68217]: DEBUG oslo_service.backend.eventlet.service [None 
req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vault.approle_secret_id = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.009785] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vault.kv_mountpoint = secret {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.009942] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vault.kv_path = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.010146] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vault.kv_version = 2 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.010314] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vault.namespace = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.010472] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vault.root_token_id = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.010625] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vault.ssl_ca_crt_file = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.010789] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vault.timeout = 60.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.010948] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vault.use_ssl = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.011131] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.011303] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.cafile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.011460] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.certfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.011622] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.collect_timing = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.011781] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.connect_retries = None {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.011940] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.connect_retry_delay = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.012109] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.endpoint_override = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.012277] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.insecure = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.012435] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.keyfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.012591] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.max_version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.012746] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.min_version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.012903] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.region_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.013073] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.retriable_status_codes = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.013237] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.service_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.013407] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.service_type = identity {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.013566] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.split_loggers = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.013721] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.status_code_retries = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.013875] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.status_code_retry_delay = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.014041] env[68217]: DEBUG 
oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.timeout = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.014223] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.014381] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] keystone.version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.014568] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.ceph_mount_options = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.014875] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.ceph_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.015072] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.connection_uri = {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.015242] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.cpu_mode = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.015412] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.cpu_model_extra_flags = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.015579] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.cpu_models = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.015750] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.cpu_power_governor_high = performance {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.015918] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.cpu_power_governor_low = powersave {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.016098] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.cpu_power_management = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.016299] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.016522] env[68217]: DEBUG oslo_service.backend.eventlet.service [None 
req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.device_detach_attempts = 8 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.016696] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.device_detach_timeout = 20 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.016869] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.disk_cachemodes = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.017045] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.disk_prefix = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.017219] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.enabled_perf_events = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.017382] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.file_backed_memory = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.017546] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.gid_maps = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.017703] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.hw_disk_discard = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.017857] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.hw_machine_type = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.018039] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.images_rbd_ceph_conf = {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.018213] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.018377] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.018548] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.images_rbd_glance_store_name = {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.018717] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.images_rbd_pool = rbd 
{{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.018887] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.images_type = default {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.019078] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.images_volume_group = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.019258] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.inject_key = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.019429] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.inject_partition = -2 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.019592] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.inject_password = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.019756] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.iscsi_iface = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.019918] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.iser_use_multipath = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.020124] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.live_migration_bandwidth = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.020306] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.live_migration_completion_timeout = 800 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.020472] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.live_migration_downtime = 500 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.020636] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.live_migration_downtime_delay = 75 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.020797] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.live_migration_downtime_steps = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.020959] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.live_migration_inbound_addr = None {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.021134] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.live_migration_permit_auto_converge = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.021397] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.live_migration_permit_post_copy = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.021453] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.live_migration_scheme = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.021629] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.live_migration_timeout_action = abort {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.021793] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.live_migration_tunnelled = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.021950] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.live_migration_uri = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.022128] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.live_migration_with_native_tls = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.022292] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.max_queues = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.022455] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.mem_stats_period_seconds = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.022690] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.022856] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.nfs_mount_options = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.023184] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.023424] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.num_aoe_discover_tries = 3 {{(pid=68217) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.023611] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.num_iser_scan_tries = 5 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.023779] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.num_memory_encrypted_guests = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.023947] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.num_nvme_discover_tries = 5 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.024128] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.num_pcie_ports = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.024299] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.num_volume_scan_tries = 5 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.024502] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.pmem_namespaces = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.024627] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.quobyte_client_cfg = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.024929] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.025121] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.rbd_connect_timeout = 5 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.025301] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.025470] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.025629] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.rbd_secret_uuid = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.025784] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.rbd_user = None {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.025945] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.realtime_scheduler_priority = 1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.026131] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.remote_filesystem_transport = ssh {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.026297] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.rescue_image_id = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.026455] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.rescue_kernel_id = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.026612] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.rescue_ramdisk_id = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.026781] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.rng_dev_path = /dev/urandom {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.026951] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.rx_queue_size = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.027136] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.smbfs_mount_options = {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.027435] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.027622] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.snapshot_compression = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.027788] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.snapshot_image_format = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.028015] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.028190] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.sparse_logical_volumes = False {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.028358] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.swtpm_enabled = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.028592] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.swtpm_group = tss {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.028702] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.swtpm_user = tss {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.028872] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.sysinfo_serial = unique {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.029066] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.tb_cache_size = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.029248] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.tx_queue_size = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.029419] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.uid_maps = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.029582] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.use_virtio_for_bridges = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.029753] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.virt_type = kvm {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.029923] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.volume_clear = zero {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.030119] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.volume_clear_size = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.030313] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.volume_enforce_multipath = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.030501] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.volume_use_multipath = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.030666] env[68217]: DEBUG 
oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.vzstorage_cache_path = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.030840] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.031019] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.vzstorage_mount_group = qemu {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.031205] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.vzstorage_mount_opts = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.031397] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.031702] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.031889] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.vzstorage_mount_user = stack {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.032076] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.032258] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.auth_section = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.032430] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.auth_type = password {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.032592] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.cafile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.032752] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.certfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.032914] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.collect_timing = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.033088] env[68217]: DEBUG 
oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.connect_retries = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.033250] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.connect_retry_delay = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.033430] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.default_floating_pool = public {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.033588] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.endpoint_override = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.033750] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.extension_sync_interval = 600 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.033911] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.http_retries = 3 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.034091] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.insecure = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.034257] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.keyfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.034417] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.max_version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.034589] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.metadata_proxy_shared_secret = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.034750] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.min_version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.034920] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.ovs_bridge = br-int {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.035100] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.physnets = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.035275] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.region_name = RegionOne 
{{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.035436] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.retriable_status_codes = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.035681] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.service_metadata_proxy = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.035766] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.service_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.035933] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.service_type = network {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.036117] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.split_loggers = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.036283] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.status_code_retries = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.036445] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.status_code_retry_delay = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.036606] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.timeout = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.036787] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.036949] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] neutron.version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.037165] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] notifications.bdms_in_notifications = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.037353] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] notifications.default_level = INFO {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.037527] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] notifications.include_share_mapping = False {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.037700] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] notifications.notification_format = unversioned {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.037867] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] notifications.notify_on_state_change = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.038054] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.038240] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] pci.alias = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.038413] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] pci.device_spec = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.038578] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] pci.report_in_placement = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.038749] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.auth_section = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.038919] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.auth_type = password {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.039123] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.auth_url = http://10.180.1.21/identity {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.039294] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.cafile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.039451] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.certfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.039612] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.collect_timing = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.039768] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.connect_retries = None {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.039921] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.connect_retry_delay = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.040092] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.default_domain_id = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.040254] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.default_domain_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.040411] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.domain_id = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.040563] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.domain_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.040718] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.endpoint_override = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.040877] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.insecure = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.041045] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.keyfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.041208] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.max_version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.041389] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.min_version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.041565] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.password = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.041728] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.project_domain_id = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.041890] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.project_domain_name = Default {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.042067] env[68217]: DEBUG 
oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.project_id = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.042243] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.project_name = service {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.042416] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.region_name = RegionOne {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.042577] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.retriable_status_codes = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.042739] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.service_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.042905] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.service_type = placement {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.043083] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.split_loggers = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.043246] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.status_code_retries = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.043409] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.status_code_retry_delay = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.043561] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.system_scope = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.043714] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.timeout = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.043868] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.trust_id = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.044041] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.user_domain_id = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.044216] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] 
placement.user_domain_name = Default {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.044373] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.user_id = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.044543] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.username = nova {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.044720] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.044879] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] placement.version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.045072] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] quota.cores = 20 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.045243] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] quota.count_usage_from_placement = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.045451] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.045587] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] quota.injected_file_content_bytes = 10240 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.045753] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] quota.injected_file_path_length = 255 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.045919] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] quota.injected_files = 5 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.046097] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] quota.instances = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.046268] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] quota.key_pairs = 100 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.046436] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] quota.metadata_items = 128 {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.046597] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] quota.ram = 51200 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.046762] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] quota.recheck_quota = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.046928] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] quota.server_group_members = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.047137] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] quota.server_groups = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.047366] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] quota.unified_limits_resource_list = ['servers'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.047547] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] quota.unified_limits_resource_strategy = require {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.047721] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.047887] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.048059] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] scheduler.image_metadata_prefilter = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.048230] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.048392] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] scheduler.max_attempts = 3 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.048564] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] scheduler.max_placement_results = 1000 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.048721] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.048883] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] scheduler.query_placement_for_image_type_support = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.049077] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.049268] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] scheduler.workers = 2 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.049457] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.049626] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.049804] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.049970] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.050151] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.050318] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.050478] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.050662] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.050832] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] 
filter_scheduler.host_subset_size = 1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.050995] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.051173] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.image_properties_default_architecture = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.051355] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.image_props_weight_multiplier = 0.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.051529] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.image_props_weight_setting = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.051705] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.051869] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.isolated_hosts = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.052044] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.isolated_images = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.052215] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.max_instances_per_host = 50 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.052377] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.052538] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.052699] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.pci_in_placement = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.052859] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.053028] env[68217]: DEBUG 
oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.053198] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.053359] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.053519] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.053680] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.053838] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.track_instance_changes = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.054023] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.054205] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] metrics.required = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.054369] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] metrics.weight_multiplier = 1.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.054566] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] metrics.weight_of_unavailable = -10000.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.054690] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] metrics.weight_setting = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.055008] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.055387] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] serial_console.enabled = False {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.055588] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] serial_console.port_range = 10000:20000 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.055768] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.055941] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.057748] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] serial_console.serialproxy_port = 6083 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.057748] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] service_user.auth_section = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.057748] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] service_user.auth_type = password {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.057748] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] service_user.cafile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.057748] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] service_user.certfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.057748] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] service_user.collect_timing = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.057748] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] service_user.insecure = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.057980] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] service_user.keyfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.057980] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] service_user.send_service_user_token = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.057980] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] service_user.split_loggers = False {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.057980] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] service_user.timeout = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.057980] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] spice.agent_enabled = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.058164] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] spice.enabled = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.058394] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.058601] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] spice.html5proxy_host = 0.0.0.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.058773] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] spice.html5proxy_port = 6082 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.058935] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] spice.image_compression = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.059139] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] spice.jpeg_compression = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.059313] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] spice.playback_compression = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.059479] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] spice.require_secure = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.059653] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] spice.server_listen = 127.0.0.1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.059822] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.060138] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] spice.spice_direct_proxy_base_url = http://127.0.0.1:13002/nova {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.060323] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] spice.streaming_mode = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.060487] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] spice.zlib_compression = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.060657] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] upgrade_levels.baseapi = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.060830] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] upgrade_levels.compute = auto {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.060992] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] upgrade_levels.conductor = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.061172] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] upgrade_levels.scheduler = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.061338] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vendordata_dynamic_auth.auth_section = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.061498] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vendordata_dynamic_auth.auth_type = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.061657] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vendordata_dynamic_auth.cafile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.061813] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vendordata_dynamic_auth.certfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.061973] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vendordata_dynamic_auth.collect_timing = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.062210] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vendordata_dynamic_auth.insecure = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.062400] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vendordata_dynamic_auth.keyfile = None {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.062567] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vendordata_dynamic_auth.split_loggers = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.062728] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vendordata_dynamic_auth.timeout = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.062899] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.api_retry_count = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.063071] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.ca_file = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.063251] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.cache_prefix = devstack-image-cache {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.063419] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.cluster_name = testcl1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.063581] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.connection_pool_size = 10 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.063741] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.console_delay_seconds = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.063906] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.datastore_regex = ^datastore.* {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.064136] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.064315] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.host_password = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.064482] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.host_port = 443 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.064667] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.host_username = administrator@vsphere.local {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.064815] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.insecure = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.064976] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.integration_bridge = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.065155] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.maximum_objects = 100 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.065316] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.pbm_default_policy = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.065478] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.pbm_enabled = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.065635] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.pbm_wsdl_location = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.065872] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.066062] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.serial_port_proxy_uri = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.066232] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.serial_port_service_uri = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.066401] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.task_poll_interval = 0.5 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.066575] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.use_linked_clone = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.066745] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.vnc_keymap = en-us {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.066907] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.vnc_port = 5900 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.067083] env[68217]: DEBUG 
oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vmware.vnc_port_total = 10000 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.067277] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vnc.auth_schemes = ['none'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.067457] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vnc.enabled = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.067744] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.067928] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.068114] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vnc.novncproxy_port = 6080 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.068306] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vnc.server_listen = 127.0.0.1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.068485] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.068647] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vnc.vencrypt_ca_certs = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.068804] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vnc.vencrypt_client_cert = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.068962] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vnc.vencrypt_client_key = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.069173] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.069345] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.disable_deep_image_inspection = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.069505] env[68217]: DEBUG oslo_service.backend.eventlet.service [None 
req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.disable_fallback_pcpu_query = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.069667] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.disable_group_policy_check_upcall = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.069825] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.069987] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.disable_rootwrap = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.070186] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.enable_numa_live_migration = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.070353] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.070521] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.070682] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.handle_virt_lifecycle_events = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.070843] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.libvirt_disable_apic = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.071012] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.never_download_image_if_on_rbd = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.071186] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.071349] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.071513] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.071674] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.071834] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.071994] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.072175] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.072336] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.072502] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.072687] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.072856] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] wsgi.client_socket_timeout = 900 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.073034] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] wsgi.default_pool_size = 1000 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.073209] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] wsgi.keep_alive = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.073380] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] wsgi.max_header_line = 16384 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.073540] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] wsgi.secure_proxy_ssl_header = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.073703] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] wsgi.ssl_ca_file = None 
{{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.073864] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] wsgi.ssl_cert_file = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.074033] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] wsgi.ssl_key_file = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.074239] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] wsgi.tcp_keepidle = 600 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.074431] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.074604] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] zvm.ca_file = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.074773] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] zvm.cloud_connector_url = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.075072] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.075253] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] zvm.reachable_timeout = 300 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.075424] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.075599] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.075776] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] profiler.connection_string = messaging:// {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.075940] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] profiler.enabled = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.076126] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] 
profiler.es_doc_type = notification {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.076294] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] profiler.es_scroll_size = 10000 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.076463] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] profiler.es_scroll_time = 2m {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.076624] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] profiler.filter_error_trace = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.076791] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] profiler.hmac_keys = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.076959] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] profiler.sentinel_service_name = mymaster {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.077173] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] profiler.socket_timeout = 0.1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.077346] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] profiler.trace_requests = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.077508] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] profiler.trace_sqlalchemy = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.077683] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] profiler_jaeger.process_tags = {} {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.077846] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] profiler_jaeger.service_name_prefix = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.078020] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] profiler_otlp.service_name_prefix = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.078191] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] remote_debug.host = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.078350] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] remote_debug.port = None {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.078526] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.078691] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.078853] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.079041] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.079273] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.079488] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.079687] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.079860] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.080035] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.080217] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.hostname = devstack {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.080381] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.080556] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.080725] env[68217]: DEBUG oslo_service.backend.eventlet.service [None 
req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.080903] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.081117] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.kombu_reconnect_splay = 0.0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.081307] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.081478] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.081639] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.081812] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.081975] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.082153] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.082332] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.082531] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.082717] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.082887] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=68217) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.083062] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.083230] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.083393] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.083555] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.083715] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.083879] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.ssl = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.084067] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.084245] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.084407] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.084574] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.084763] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.ssl_version = {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.084944] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.085145] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None 
None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.085331] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_notifications.retry = -1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.085532] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.085747] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_messaging_notifications.transport_url = **** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.085941] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.auth_section = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.086122] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.auth_type = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.086287] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.cafile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.086445] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.certfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.086606] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.collect_timing = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.086765] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.connect_retries = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.086922] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.connect_retry_delay = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.087090] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.endpoint_id = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.087266] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.endpoint_interface = publicURL {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.087424] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.endpoint_override = 
None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.087579] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.endpoint_region_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.087775] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.endpoint_service_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.087959] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.endpoint_service_type = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.088171] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.insecure = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.088365] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.keyfile = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.088537] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.max_version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.088698] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.min_version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.088856] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.region_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.089061] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.retriable_status_codes = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.089223] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.service_name = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.089385] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.service_type = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.089549] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.split_loggers = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.089709] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.status_code_retries = None {{(pid=68217) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.089867] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.status_code_retry_delay = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.090064] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.timeout = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.090275] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.valid_interfaces = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.090491] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_limit.version = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.090677] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_reports.file_event_handler = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.090847] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_reports.file_event_handler_interval = 1 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.091044] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] oslo_reports.log_dir = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.091252] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.091425] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vif_plug_linux_bridge_privileged.group = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.091621] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.091821] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.092012] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.092182] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vif_plug_linux_bridge_privileged.user 
= None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.092355] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.092516] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vif_plug_ovs_privileged.group = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.092676] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vif_plug_ovs_privileged.helper_command = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.092841] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.093012] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.093238] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] vif_plug_ovs_privileged.user = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.093427] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] os_vif_linux_bridge.flat_interface = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.093610] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.093785] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.093958] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.094152] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.094329] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.094497] env[68217]: DEBUG oslo_service.backend.eventlet.service [None 
req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.094661] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] os_vif_linux_bridge.vlan_interface = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.094839] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.095023] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] os_vif_ovs.isolate_vif = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.095199] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.095364] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.095534] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.095707] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] os_vif_ovs.ovsdb_interface = native {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.095870] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] os_vif_ovs.per_port_bridge = False {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.096052] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] privsep_osbrick.capabilities = [21] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.096219] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] privsep_osbrick.group = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.096374] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] privsep_osbrick.helper_command = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.096537] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.096702] env[68217]: DEBUG oslo_service.backend.eventlet.service [None 
req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] privsep_osbrick.thread_pool_size = 8 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.096871] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] privsep_osbrick.user = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.097069] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.097250] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] nova_sys_admin.group = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.097410] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] nova_sys_admin.helper_command = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.097575] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.097735] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] nova_sys_admin.thread_pool_size = 8 {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.097892] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] nova_sys_admin.user = None {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2824}} [ 509.098036] env[68217]: DEBUG oslo_service.backend.eventlet.service [None req-dd6441d3-23ab-45b9-b05a-c37c05829084 None None] ******************************************************************************** {{(pid=68217) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2828}} [ 509.098473] env[68217]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 509.602819] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Getting list of instances from cluster (obj){ [ 509.602819] env[68217]: value = "domain-c8" [ 509.602819] env[68217]: _type = "ClusterComputeResource" [ 509.602819] env[68217]: } {{(pid=68217) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 509.603925] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338e2571-7cd1-439a-8225-db904a1182a8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 509.612675] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Got total of 0 instances {{(pid=68217) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 509.613271] env[68217]: WARNING nova.virt.vmwareapi.driver [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear 
maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 509.613734] env[68217]: INFO nova.virt.node [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Generated node identity 42aedcce-ee61-45e1-bf10-c06056d1f548 [ 509.613967] env[68217]: INFO nova.virt.node [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Wrote node identity 42aedcce-ee61-45e1-bf10-c06056d1f548 to /opt/stack/data/n-cpu-1/compute_id [ 510.116431] env[68217]: WARNING nova.compute.manager [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Compute nodes ['42aedcce-ee61-45e1-bf10-c06056d1f548'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 511.121994] env[68217]: INFO nova.compute.manager [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 512.128693] env[68217]: WARNING nova.compute.manager [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 512.129053] env[68217]: DEBUG oslo_concurrency.lockutils [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 512.129185] env[68217]: DEBUG oslo_concurrency.lockutils [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 512.129351] env[68217]: DEBUG oslo_concurrency.lockutils [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 512.129506] env[68217]: DEBUG nova.compute.resource_tracker [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68217) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 512.130487] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738be1c2-d845-4baa-ad37-ba49de55438a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.138977] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b39503ca-b307-4f61-bcec-7fe6a9f553bd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.152786] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34425b6b-aef1-41ff-8a1b-72fe44362d20 {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.159152] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b4e681-6be0-419a-9808-580b74dfcf89 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 512.187674] env[68217]: DEBUG nova.compute.resource_tracker [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181108MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=68217) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 512.187815] env[68217]: DEBUG oslo_concurrency.lockutils [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 512.188032] env[68217]: DEBUG oslo_concurrency.lockutils [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 512.692779] env[68217]: WARNING nova.compute.resource_tracker [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] No compute node record for cpu-1:42aedcce-ee61-45e1-bf10-c06056d1f548: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 42aedcce-ee61-45e1-bf10-c06056d1f548 could not be found. [ 513.196257] env[68217]: INFO nova.compute.resource_tracker [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 42aedcce-ee61-45e1-bf10-c06056d1f548 [ 514.709686] env[68217]: DEBUG nova.compute.resource_tracker [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 514.709686] env[68217]: DEBUG nova.compute.resource_tracker [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 514.867550] env[68217]: INFO nova.scheduler.client.report [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] [req-8f06f887-8a62-4c44-aef0-9dd9e4ee3e43] Created resource provider record via placement API for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 514.888091] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6a8ec7-8611-476c-951a-77c6e20084f9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.895203] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e26483-7818-451f-be35-e252994821ee {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.926046] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c031e4c-6027-4ef1-bb15-1e5afcb8103b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.932476] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9979cc18-1ab8-4f6f-b7ba-805ba0c880da {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 514.945653] env[68217]: DEBUG nova.compute.provider_tree [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 515.486016] env[68217]: DEBUG nova.scheduler.client.report [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 515.486245] env[68217]: DEBUG nova.compute.provider_tree [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 0 to 1 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 515.486383] env[68217]: DEBUG nova.compute.provider_tree [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 515.534693] env[68217]: DEBUG nova.compute.provider_tree [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Updating 
resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 1 to 2 during operation: update_traits {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 516.039348] env[68217]: DEBUG nova.compute.resource_tracker [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68217) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 516.039773] env[68217]: DEBUG oslo_concurrency.lockutils [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.851s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 516.039773] env[68217]: DEBUG nova.service [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Creating RPC server for service compute {{(pid=68217) start /opt/stack/nova/nova/service.py:186}} [ 516.054283] env[68217]: DEBUG nova.service [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] Join ServiceGroup membership for this service compute {{(pid=68217) start /opt/stack/nova/nova/service.py:203}} [ 516.054472] env[68217]: DEBUG nova.servicegroup.drivers.db [None req-95def2f2-5164-4fce-ae79-c43bf5a27f29 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=68217) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 555.578047] env[68217]: DEBUG oslo_concurrency.lockutils [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Acquiring lock "cdc84742-e20a-4e48-bfff-b3ac34405c1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 555.578047] env[68217]: DEBUG oslo_concurrency.lockutils [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Lock "cdc84742-e20a-4e48-bfff-b3ac34405c1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 555.582087] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "11f9c054-62b9-4ac9-9651-5c85e7a86663" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 555.582326] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "11f9c054-62b9-4ac9-9651-5c85e7a86663" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 556.070485] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe 
tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Acquiring lock "71dd4921-5859-421f-9e31-e9800adc9e3c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 556.070694] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Lock "71dd4921-5859-421f-9e31-e9800adc9e3c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 556.082442] env[68217]: DEBUG nova.compute.manager [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 556.088532] env[68217]: DEBUG nova.compute.manager [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 556.423436] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquiring lock "471e8a27-ed87-461a-b817-cd5ad208dd10" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 556.423642] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Lock "471e8a27-ed87-461a-b817-cd5ad208dd10" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 556.575070] env[68217]: DEBUG nova.compute.manager [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 556.625617] env[68217]: DEBUG oslo_concurrency.lockutils [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 556.625920] env[68217]: DEBUG oslo_concurrency.lockutils [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 556.627848] env[68217]: INFO nova.compute.claims [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 556.632029] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 556.926121] env[68217]: DEBUG nova.compute.manager [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 557.105785] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 557.455020] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 557.645452] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquiring lock "4f4dc254-8e4f-4c5f-a2a8-eef6230825c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 557.646210] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "4f4dc254-8e4f-4c5f-a2a8-eef6230825c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 557.737568] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7427ead4-7bf9-4c11-b074-279a801b3b69 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.746034] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228c3065-44e4-4c39-bb5b-149de2e8c1a3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.786165] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b6dd303-74f7-4ee9-ae64-6cf8f2069561 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.794037] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d73d113c-6baf-4595-8c7f-0aa02a18862e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.807958] env[68217]: DEBUG nova.compute.provider_tree [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 558.149746] env[68217]: DEBUG nova.compute.manager [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 
tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 558.312026] env[68217]: DEBUG nova.scheduler.client.report [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 558.673597] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Acquiring lock "7056fb29-2a2f-4275-a411-4d5f3fcb421f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 558.675248] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Lock "7056fb29-2a2f-4275-a411-4d5f3fcb421f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 558.680145] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 558.819148] env[68217]: DEBUG oslo_concurrency.lockutils [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.193s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 558.819950] env[68217]: DEBUG nova.compute.manager [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 558.822967] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.191s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 558.824690] env[68217]: INFO nova.compute.claims [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 559.178749] env[68217]: DEBUG nova.compute.manager [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 559.189809] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "9ac81867-311c-42f3-b38f-67dc10f409c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 559.189809] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "9ac81867-311c-42f3-b38f-67dc10f409c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 559.333037] env[68217]: DEBUG nova.compute.utils [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 559.333692] env[68217]: DEBUG nova.compute.manager [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 559.334414] env[68217]: DEBUG nova.network.neutron [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 559.695394] env[68217]: DEBUG nova.compute.manager [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 559.718116] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 559.769489] env[68217]: DEBUG nova.policy [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '46f10d1e9c154b1694542de71f8ad45a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ce977cd63b274faeb137d7b3a126a35d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 559.842750] env[68217]: DEBUG nova.compute.manager [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 560.035166] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd58da09-c460-43cc-a796-2154a64041ae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.043465] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97bafee5-57fd-435b-94dd-0c387466545e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.095168] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34770bf2-f6fc-4095-8c56-f596535ef477 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.103275] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5786e822-ee8a-4c65-8c15-467816754e8d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.117947] env[68217]: DEBUG nova.compute.provider_tree [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 560.214146] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Acquiring lock "b0b21c65-ef3d-4492-a6b2-d2321a3dacde" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 560.214365] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Lock "b0b21c65-ef3d-4492-a6b2-d2321a3dacde" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 560.230555] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 560.627739] env[68217]: DEBUG nova.scheduler.client.report [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 560.700837] env[68217]: DEBUG nova.network.neutron [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Successfully created port: 87dc0f7f-05f9-47ef-a033-652d923dccac {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 560.720143] env[68217]: DEBUG nova.compute.manager [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 560.857060] env[68217]: DEBUG nova.compute.manager [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 560.904984] env[68217]: DEBUG nova.virt.hardware [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 560.905238] env[68217]: DEBUG nova.virt.hardware [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 560.905389] env[68217]: DEBUG nova.virt.hardware [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 560.905565] env[68217]: DEBUG nova.virt.hardware [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 560.905706] env[68217]: DEBUG nova.virt.hardware [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 560.905853] env[68217]: DEBUG nova.virt.hardware [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 560.906069] env[68217]: DEBUG nova.virt.hardware [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 560.906231] env[68217]: DEBUG nova.virt.hardware [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 560.906718] env[68217]: DEBUG nova.virt.hardware [None 
req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 560.906895] env[68217]: DEBUG nova.virt.hardware [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 560.907088] env[68217]: DEBUG nova.virt.hardware [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 560.909013] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d457399e-d36e-4d94-8e2f-6079dff68847 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.917839] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42097c03-5e12-4ff4-bd93-75d6701fd9e8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.935894] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db059aa4-127f-4183-baf8-b37d639766db {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.131610] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.308s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 561.132194] env[68217]: DEBUG nova.compute.manager [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 561.139155] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.033s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 561.140372] env[68217]: INFO nova.compute.claims [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 561.248717] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "14c8e8e6-5d7f-45b4-8a84-d5951c38573f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 561.250017] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "14c8e8e6-5d7f-45b4-8a84-d5951c38573f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 561.259130] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 561.640251] env[68217]: DEBUG nova.compute.utils [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 561.642535] env[68217]: DEBUG nova.compute.manager [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 561.642832] env[68217]: DEBUG nova.network.neutron [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 561.707617] env[68217]: DEBUG nova.policy [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1cbcf73e36ca4583b53b9c0c2cfd0e3f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cff35c33460c4a50ae6bee636d950504', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 561.757481] env[68217]: DEBUG nova.compute.manager [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 562.054274] env[68217]: DEBUG nova.network.neutron [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Successfully created port: d2eec7fc-f623-4a8e-aee2-762e1eb58cf7 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 562.147132] env[68217]: DEBUG nova.compute.manager [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 562.284028] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 562.325795] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66be2460-d0b7-445d-ae6f-dbbdb5da94c5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.334228] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8acad1d1-9e9a-4bea-b811-0b0abf935140 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.376275] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56044af3-7836-4cf1-b83f-e1a3d56f94c8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.387997] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd8aecd9-a933-492e-9c11-fd26fc1328b5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 562.408220] env[68217]: DEBUG nova.compute.provider_tree [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 562.910526] env[68217]: DEBUG nova.scheduler.client.report [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 563.056452] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 563.157308] env[68217]: DEBUG nova.compute.manager [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 563.186866] env[68217]: DEBUG nova.virt.hardware [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 563.187109] env[68217]: DEBUG nova.virt.hardware [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 563.187265] env[68217]: DEBUG nova.virt.hardware [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 563.187440] env[68217]: DEBUG nova.virt.hardware [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 563.187580] env[68217]: DEBUG nova.virt.hardware [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 563.187718] env[68217]: DEBUG nova.virt.hardware [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 563.187913] env[68217]: DEBUG nova.virt.hardware [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 563.188115] env[68217]: DEBUG nova.virt.hardware [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 563.188303] 
env[68217]: DEBUG nova.virt.hardware [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 563.188465] env[68217]: DEBUG nova.virt.hardware [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 563.188675] env[68217]: DEBUG nova.virt.hardware [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 563.189732] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05fdafa8-f7a7-467e-bc20-ca332619dd5e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.197914] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c04a32-6489-4d66-9f55-a2982ec7065f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.420129] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.280s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 563.420713] env[68217]: DEBUG nova.compute.manager [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 563.424341] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.970s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 563.428207] env[68217]: INFO nova.compute.claims [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 563.518025] env[68217]: DEBUG nova.network.neutron [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Successfully updated port: 87dc0f7f-05f9-47ef-a033-652d923dccac {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 563.559791] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Getting list of instances from cluster (obj){ [ 563.559791] env[68217]: value = "domain-c8" [ 563.559791] env[68217]: _type = "ClusterComputeResource" [ 563.559791] env[68217]: } {{(pid=68217) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 563.561603] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-793fba57-560e-4f93-8ea0-0402bfac166d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.570578] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Got total of 0 instances {{(pid=68217) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 563.570785] env[68217]: WARNING nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] While synchronizing instance power states, found 3 instances in the database and 0 instances on the hypervisor. 
[ 563.570915] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Triggering sync for uuid cdc84742-e20a-4e48-bfff-b3ac34405c1d {{(pid=68217) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 563.571065] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Triggering sync for uuid 11f9c054-62b9-4ac9-9651-5c85e7a86663 {{(pid=68217) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 563.571310] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Triggering sync for uuid 71dd4921-5859-421f-9e31-e9800adc9e3c {{(pid=68217) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 563.572266] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "cdc84742-e20a-4e48-bfff-b3ac34405c1d" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 563.572266] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "11f9c054-62b9-4ac9-9651-5c85e7a86663" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 563.572386] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "71dd4921-5859-421f-9e31-e9800adc9e3c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 563.572553] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 563.573199] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Getting list of instances from cluster (obj){ [ 563.573199] env[68217]: value = "domain-c8" [ 563.573199] env[68217]: _type = "ClusterComputeResource" [ 563.573199] env[68217]: } {{(pid=68217) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 563.575315] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71f5e498-ce06-4cb3-879c-735284fa60ed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.583682] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Got total of 0 instances {{(pid=68217) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 563.935502] env[68217]: DEBUG nova.compute.utils [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 563.938715] env[68217]: DEBUG nova.compute.manager [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 
tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 563.938919] env[68217]: DEBUG nova.network.neutron [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 564.019432] env[68217]: DEBUG oslo_concurrency.lockutils [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Acquiring lock "refresh_cache-cdc84742-e20a-4e48-bfff-b3ac34405c1d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.019576] env[68217]: DEBUG oslo_concurrency.lockutils [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Acquired lock "refresh_cache-cdc84742-e20a-4e48-bfff-b3ac34405c1d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 564.019723] env[68217]: DEBUG nova.network.neutron [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 564.077779] env[68217]: DEBUG nova.policy [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ec37b30206c42b29f61fa09a9fc764b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '11e2667aafee473fa1aab9bfe6377931', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 564.220965] env[68217]: DEBUG nova.network.neutron [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Successfully updated port: d2eec7fc-f623-4a8e-aee2-762e1eb58cf7 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 564.363271] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 564.363271] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 564.363271] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] 
Running periodic task ComputeManager._poll_rescued_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 564.363271] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 564.363271] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 564.363271] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 564.363570] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 564.363570] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68217) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 564.363570] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 564.445160] env[68217]: DEBUG nova.compute.manager [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 564.474746] env[68217]: DEBUG nova.compute.manager [req-c1dfae52-0bb6-4f4b-a581-dffcd20508ea req-2dddd5e9-065c-44d8-90f0-d108af068f16 service nova] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Received event network-vif-plugged-87dc0f7f-05f9-47ef-a033-652d923dccac {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 564.474957] env[68217]: DEBUG oslo_concurrency.lockutils [req-c1dfae52-0bb6-4f4b-a581-dffcd20508ea req-2dddd5e9-065c-44d8-90f0-d108af068f16 service nova] Acquiring lock "cdc84742-e20a-4e48-bfff-b3ac34405c1d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 564.475185] env[68217]: DEBUG oslo_concurrency.lockutils [req-c1dfae52-0bb6-4f4b-a581-dffcd20508ea req-2dddd5e9-065c-44d8-90f0-d108af068f16 service nova] Lock "cdc84742-e20a-4e48-bfff-b3ac34405c1d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 564.475343] env[68217]: DEBUG oslo_concurrency.lockutils [req-c1dfae52-0bb6-4f4b-a581-dffcd20508ea req-2dddd5e9-065c-44d8-90f0-d108af068f16 service nova] Lock "cdc84742-e20a-4e48-bfff-b3ac34405c1d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 564.475503] env[68217]: DEBUG nova.compute.manager [req-c1dfae52-0bb6-4f4b-a581-dffcd20508ea req-2dddd5e9-065c-44d8-90f0-d108af068f16 service nova] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] No waiting events found dispatching network-vif-plugged-87dc0f7f-05f9-47ef-a033-652d923dccac {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 564.475660] env[68217]: WARNING nova.compute.manager [req-c1dfae52-0bb6-4f4b-a581-dffcd20508ea req-2dddd5e9-065c-44d8-90f0-d108af068f16 service nova] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Received unexpected event network-vif-plugged-87dc0f7f-05f9-47ef-a033-652d923dccac for instance with vm_state building and task_state spawning. [ 564.616116] env[68217]: DEBUG nova.network.neutron [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 564.660930] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aff641b-0e1c-4f20-ac29-1033a3d06f1f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.676931] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-958f1bcc-fafb-4c5c-b54b-2817f10d9683 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.690020] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "00d2302b-84d4-42d8-94c7-caf45b925ddf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 564.690020] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "00d2302b-84d4-42d8-94c7-caf45b925ddf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 564.717847] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7fd0887-7357-4da7-8f9e-3725fcc4321c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.730518] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d2059e-8386-4d77-9486-08e1f5b3517f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 564.737468] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "refresh_cache-11f9c054-62b9-4ac9-9651-5c85e7a86663" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 564.737468] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquired lock "refresh_cache-11f9c054-62b9-4ac9-9651-5c85e7a86663" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 564.737468] env[68217]: DEBUG nova.network.neutron [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 564.754960] env[68217]: DEBUG nova.compute.provider_tree [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 
{{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 564.869028] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 564.933954] env[68217]: DEBUG nova.network.neutron [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Successfully created port: bfc8f377-ffbf-460c-bbb3-ae9af4f22fe8 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 564.944437] env[68217]: DEBUG nova.network.neutron [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Updating instance_info_cache with network_info: [{"id": "87dc0f7f-05f9-47ef-a033-652d923dccac", "address": "fa:16:3e:8d:9c:f4", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.98", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87dc0f7f-05", "ovs_interfaceid": "87dc0f7f-05f9-47ef-a033-652d923dccac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.218978] env[68217]: DEBUG nova.compute.manager [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 565.257906] env[68217]: DEBUG nova.scheduler.client.report [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 565.281362] env[68217]: DEBUG nova.network.neutron [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 565.372047] env[68217]: DEBUG oslo_concurrency.lockutils [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Acquiring lock "ae5fa3f4-e487-40ed-9ca4-12a6f9713eba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 565.372275] env[68217]: DEBUG oslo_concurrency.lockutils [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Lock "ae5fa3f4-e487-40ed-9ca4-12a6f9713eba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 565.446741] env[68217]: DEBUG oslo_concurrency.lockutils [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Releasing lock "refresh_cache-cdc84742-e20a-4e48-bfff-b3ac34405c1d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 565.447092] env[68217]: DEBUG nova.compute.manager [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Instance network_info: |[{"id": "87dc0f7f-05f9-47ef-a033-652d923dccac", "address": "fa:16:3e:8d:9c:f4", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.98", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87dc0f7f-05", "ovs_interfaceid": "87dc0f7f-05f9-47ef-a033-652d923dccac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 565.447629] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:9c:f4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87dc0f7f-05f9-47ef-a033-652d923dccac', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 565.462164] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 565.463290] env[68217]: DEBUG nova.network.neutron [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Updating instance_info_cache with network_info: [{"id": "d2eec7fc-f623-4a8e-aee2-762e1eb58cf7", "address": "fa:16:3e:90:07:ac", "network": {"id": "3f8eb120-19fa-420f-a14b-3cf960a6fe58", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1021764887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cff35c33460c4a50ae6bee636d950504", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2eec7fc-f6", "ovs_interfaceid": "d2eec7fc-f623-4a8e-aee2-762e1eb58cf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 565.468062] env[68217]: DEBUG nova.compute.manager [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 565.468062] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-918e39f5-a237-49b6-8fae-a6b83c6e7be9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.481764] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Created folder: OpenStack in parent group-v4. [ 565.482013] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Creating folder: Project (ce977cd63b274faeb137d7b3a126a35d). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 565.482264] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-300d6b26-49f0-44c1-919d-c4cccb7a6eff {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.495161] env[68217]: DEBUG nova.virt.hardware [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 565.495161] env[68217]: DEBUG nova.virt.hardware [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 565.495302] env[68217]: DEBUG nova.virt.hardware [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 565.495397] env[68217]: DEBUG nova.virt.hardware [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 565.495541] env[68217]: DEBUG nova.virt.hardware [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 565.495683] env[68217]: DEBUG nova.virt.hardware [None 
req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 565.495898] env[68217]: DEBUG nova.virt.hardware [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 565.496040] env[68217]: DEBUG nova.virt.hardware [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 565.496211] env[68217]: DEBUG nova.virt.hardware [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 565.496395] env[68217]: DEBUG nova.virt.hardware [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 565.496750] env[68217]: DEBUG nova.virt.hardware [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 565.498378] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30960f40-51d9-495f-ac57-2640c5dbe0b7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.503678] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Created folder: Project (ce977cd63b274faeb137d7b3a126a35d) in parent group-v594094. [ 565.503932] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Creating folder: Instances. Parent ref: group-v594095. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 565.508258] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3d9c22bc-915e-4ab7-b00c-bff879a0aa76 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.521512] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a55dd2a-6872-4dad-8062-3038f264e5a5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.525396] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Created folder: Instances in parent group-v594095. [ 565.525702] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 565.525911] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 565.526437] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d99d17d8-f0ba-4242-b3f7-2bb0a815f0bc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.558479] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 565.558479] env[68217]: value = "task-2960526" [ 565.558479] env[68217]: _type = "Task" [ 565.558479] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 565.566833] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960526, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 565.744143] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 565.764666] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.340s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 565.765186] env[68217]: DEBUG nova.compute.manager [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 565.767816] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.088s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 565.769213] env[68217]: INFO nova.compute.claims [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 565.971761] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Releasing lock "refresh_cache-11f9c054-62b9-4ac9-9651-5c85e7a86663" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 565.972483] env[68217]: DEBUG nova.compute.manager [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Instance network_info: |[{"id": "d2eec7fc-f623-4a8e-aee2-762e1eb58cf7", "address": "fa:16:3e:90:07:ac", "network": {"id": "3f8eb120-19fa-420f-a14b-3cf960a6fe58", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1021764887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cff35c33460c4a50ae6bee636d950504", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2eec7fc-f6", "ovs_interfaceid": "d2eec7fc-f623-4a8e-aee2-762e1eb58cf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 565.972599] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:07:ac', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e885ebd4-93ca-4e9e-8889-0f16bd91e61e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd2eec7fc-f623-4a8e-aee2-762e1eb58cf7', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 565.984067] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 
tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Creating folder: Project (cff35c33460c4a50ae6bee636d950504). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 565.984067] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2760ef7b-5397-4b8e-9779-977b82da5960 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.994388] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Created folder: Project (cff35c33460c4a50ae6bee636d950504) in parent group-v594094. [ 565.994388] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Creating folder: Instances. Parent ref: group-v594098. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 565.994566] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb5484e4-d5f0-4255-ae9c-0747332bd257 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.008588] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Created folder: Instances in parent group-v594098. [ 566.009222] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 566.009222] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 566.009390] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-66c9cc11-1fa2-4118-a2ce-b2778b872199 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.033453] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 566.033453] env[68217]: value = "task-2960529" [ 566.033453] env[68217]: _type = "Task" [ 566.033453] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.041780] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960529, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.068339] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960526, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.274060] env[68217]: DEBUG nova.compute.utils [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 566.283885] env[68217]: DEBUG nova.compute.manager [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 566.283885] env[68217]: DEBUG nova.network.neutron [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 566.383154] env[68217]: DEBUG nova.policy [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0800d95dafdb4958acf5de250b287cb1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74b280370d334d128f9ad30ed7bc2a9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 566.544223] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960529, 'name': CreateVM_Task, 'duration_secs': 0.339078} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 566.544340] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 566.555106] env[68217]: DEBUG oslo_vmware.service [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36d47b59-ccdc-41c3-bfdc-31242a390651 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.569242] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 566.569681] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 566.570182] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 566.570473] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c306eece-6912-427b-9e71-e460427eaee3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.576527] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960526, 'name': CreateVM_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.580144] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 566.580144] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52edaf78-a3e1-bb3c-582e-4cad21972f9f" [ 566.580144] env[68217]: _type = "Task" [ 566.580144] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 566.590331] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52edaf78-a3e1-bb3c-582e-4cad21972f9f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 566.713441] env[68217]: DEBUG nova.compute.manager [req-fb7f6a3f-1584-4453-8a89-5a9418091772 req-62eff930-16a3-4298-81be-1679f569add0 service nova] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Received event network-vif-plugged-d2eec7fc-f623-4a8e-aee2-762e1eb58cf7 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 566.713618] env[68217]: DEBUG oslo_concurrency.lockutils [req-fb7f6a3f-1584-4453-8a89-5a9418091772 req-62eff930-16a3-4298-81be-1679f569add0 service nova] Acquiring lock "11f9c054-62b9-4ac9-9651-5c85e7a86663-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 566.713765] env[68217]: DEBUG oslo_concurrency.lockutils [req-fb7f6a3f-1584-4453-8a89-5a9418091772 req-62eff930-16a3-4298-81be-1679f569add0 service nova] Lock "11f9c054-62b9-4ac9-9651-5c85e7a86663-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 566.713901] env[68217]: DEBUG oslo_concurrency.lockutils [req-fb7f6a3f-1584-4453-8a89-5a9418091772 req-62eff930-16a3-4298-81be-1679f569add0 service nova] Lock "11f9c054-62b9-4ac9-9651-5c85e7a86663-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 566.714123] env[68217]: DEBUG nova.compute.manager [req-fb7f6a3f-1584-4453-8a89-5a9418091772 req-62eff930-16a3-4298-81be-1679f569add0 service nova] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] No waiting events found dispatching network-vif-plugged-d2eec7fc-f623-4a8e-aee2-762e1eb58cf7 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 566.714309] env[68217]: WARNING nova.compute.manager [req-fb7f6a3f-1584-4453-8a89-5a9418091772 req-62eff930-16a3-4298-81be-1679f569add0 service nova] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Received unexpected event network-vif-plugged-d2eec7fc-f623-4a8e-aee2-762e1eb58cf7 for instance with vm_state building and task_state spawning. [ 566.782651] env[68217]: DEBUG nova.compute.manager [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 566.991780] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad894d4-5f87-47d5-ac69-b687b1849e47 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.000365] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2f90d9-8cb2-4c3c-a676-beab597b4bca {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.035707] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cc0d1c1-f125-4a43-93f4-f8dbc04095d9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.044874] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1aadf99-d0b9-4ad8-97b3-21d5f31a8797 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.063962] env[68217]: DEBUG nova.compute.provider_tree [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 567.074289] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960526, 'name': CreateVM_Task, 'duration_secs': 1.120217} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 567.074471] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 567.076081] env[68217]: DEBUG oslo_concurrency.lockutils [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.092381] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 567.092381] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 567.092587] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "[datastore1] 
devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.093334] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 567.093438] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 567.093773] env[68217]: DEBUG oslo_concurrency.lockutils [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 567.096953] env[68217]: DEBUG oslo_concurrency.lockutils [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 567.096953] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc6b4361-6bf7-4fd0-a684-886fd4b6cb5c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.100227] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-55608bbb-1749-40be-8204-0e20864a15d4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.106314] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Waiting for the task: (returnval){ [ 567.106314] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f271f1-f9ac-d4ee-8090-cc5e74f0716a" [ 567.106314] env[68217]: _type = "Task" [ 567.106314] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.115895] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f271f1-f9ac-d4ee-8090-cc5e74f0716a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.117281] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 567.117445] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 567.118265] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d2d630-089f-46eb-a31d-02423dc70a1f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.125540] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f2b2bba4-a157-4ddf-93d4-ca4a426ec50b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.131022] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 567.131022] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5267608e-f89d-c228-bf77-a2d349f275be" [ 567.131022] env[68217]: _type = "Task" [ 567.131022] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 567.138736] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5267608e-f89d-c228-bf77-a2d349f275be, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 567.199217] env[68217]: DEBUG nova.network.neutron [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Successfully created port: 446b7b15-f602-4a41-b415-19e2cff8535a {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 567.559504] env[68217]: DEBUG nova.network.neutron [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Successfully updated port: bfc8f377-ffbf-460c-bbb3-ae9af4f22fe8 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 567.572081] env[68217]: DEBUG nova.scheduler.client.report [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 567.618420] env[68217]: DEBUG oslo_concurrency.lockutils [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 567.618679] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 567.618909] env[68217]: DEBUG oslo_concurrency.lockutils [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.642055] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Preparing fetch location {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 567.643078] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Creating directory with path [datastore1] 
vmware_temp/595a6c38-888a-4504-8951-04f34d7ca9aa/575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 567.643754] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-04e56db4-639e-4082-8988-93c705becf93 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.669698] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Created directory with path [datastore1] vmware_temp/595a6c38-888a-4504-8951-04f34d7ca9aa/575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 567.671820] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Fetch image to [datastore1] vmware_temp/595a6c38-888a-4504-8951-04f34d7ca9aa/575ba628-84b6-4b0c-98ba-305166627d10/tmp-sparse.vmdk {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 567.671820] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Downloading image file data 575ba628-84b6-4b0c-98ba-305166627d10 to [datastore1] vmware_temp/595a6c38-888a-4504-8951-04f34d7ca9aa/575ba628-84b6-4b0c-98ba-305166627d10/tmp-sparse.vmdk on the data store datastore1 {{(pid=68217) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 567.671820] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb04009b-4077-4d56-b303-50c0dddff1cf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.680593] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-658f9fce-4813-4982-baea-f7e986891aa3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.694280] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5d0488c-53fa-47df-acdf-f9bc91fbabb7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.735308] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc2e56ed-b4da-40fd-981d-e16dd4821471 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.743403] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Acquiring lock "0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 567.743403] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7b1171ba-3075-4ead-937d-ac10548e4dec 
tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Lock "0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 567.745316] env[68217]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f9609de1-dde2-4ada-a031-04acd52af127 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.798265] env[68217]: DEBUG nova.compute.manager [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 567.838428] env[68217]: DEBUG nova.virt.hardware [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 567.838428] env[68217]: DEBUG nova.virt.hardware [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 567.838428] env[68217]: DEBUG nova.virt.hardware [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 567.838666] env[68217]: DEBUG nova.virt.hardware [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 567.838795] env[68217]: DEBUG nova.virt.hardware [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 567.839492] env[68217]: DEBUG nova.virt.hardware [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 
tempest-DeleteServersAdminTestJSON-1006304365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 567.839492] env[68217]: DEBUG nova.virt.hardware [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 567.839492] env[68217]: DEBUG nova.virt.hardware [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 567.839673] env[68217]: DEBUG nova.virt.hardware [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 567.839861] env[68217]: DEBUG nova.virt.hardware [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 567.840140] env[68217]: DEBUG nova.virt.hardware [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 567.841027] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Downloading image file data 575ba628-84b6-4b0c-98ba-305166627d10 to the data store datastore1 {{(pid=68217) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 567.843242] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71d172ce-5c9f-47ad-a6d2-687c6cdbeb42 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.852119] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d09ea8a-d1ca-436f-a2ca-3cb86aa9f61a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.947634] env[68217]: DEBUG oslo_vmware.rw_handles [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/595a6c38-888a-4504-8951-04f34d7ca9aa/575ba628-84b6-4b0c-98ba-305166627d10/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68217) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 568.062976] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Acquiring lock "refresh_cache-71dd4921-5859-421f-9e31-e9800adc9e3c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.063287] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Acquired lock "refresh_cache-71dd4921-5859-421f-9e31-e9800adc9e3c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 568.063622] env[68217]: DEBUG nova.network.neutron [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 568.080429] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.312s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 568.081009] env[68217]: DEBUG nova.compute.manager [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 568.086338] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.368s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 568.087671] env[68217]: INFO nova.compute.claims [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 568.410469] env[68217]: DEBUG oslo_concurrency.lockutils [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Acquiring lock "bbd282ea-58aa-47b8-aa82-283a55ac1b29" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 568.411604] env[68217]: DEBUG oslo_concurrency.lockutils [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Lock "bbd282ea-58aa-47b8-aa82-283a55ac1b29" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 568.592567] env[68217]: DEBUG nova.compute.manager [req-6010ba4f-38e2-42cb-8ca9-d6f9bd35223a req-7001eba6-233a-43b4-ad1e-911e0da56027 service nova] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Received event network-changed-87dc0f7f-05f9-47ef-a033-652d923dccac {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 568.592567] env[68217]: DEBUG nova.compute.manager [req-6010ba4f-38e2-42cb-8ca9-d6f9bd35223a req-7001eba6-233a-43b4-ad1e-911e0da56027 service nova] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Refreshing instance network info cache due to event network-changed-87dc0f7f-05f9-47ef-a033-652d923dccac. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 568.592567] env[68217]: DEBUG oslo_concurrency.lockutils [req-6010ba4f-38e2-42cb-8ca9-d6f9bd35223a req-7001eba6-233a-43b4-ad1e-911e0da56027 service nova] Acquiring lock "refresh_cache-cdc84742-e20a-4e48-bfff-b3ac34405c1d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.592567] env[68217]: DEBUG oslo_concurrency.lockutils [req-6010ba4f-38e2-42cb-8ca9-d6f9bd35223a req-7001eba6-233a-43b4-ad1e-911e0da56027 service nova] Acquired lock "refresh_cache-cdc84742-e20a-4e48-bfff-b3ac34405c1d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 568.592567] env[68217]: DEBUG nova.network.neutron [req-6010ba4f-38e2-42cb-8ca9-d6f9bd35223a req-7001eba6-233a-43b4-ad1e-911e0da56027 service nova] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Refreshing network info cache for port 87dc0f7f-05f9-47ef-a033-652d923dccac {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 568.594481] env[68217]: DEBUG nova.compute.utils [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 568.602233] env[68217]: DEBUG nova.compute.manager [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 568.602438] env[68217]: DEBUG nova.network.neutron [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 568.655611] env[68217]: DEBUG nova.network.neutron [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 568.703497] env[68217]: DEBUG oslo_vmware.rw_handles [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Completed reading data from the image iterator. {{(pid=68217) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 568.704267] env[68217]: DEBUG oslo_vmware.rw_handles [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/595a6c38-888a-4504-8951-04f34d7ca9aa/575ba628-84b6-4b0c-98ba-305166627d10/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 568.756168] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Downloaded image file data 575ba628-84b6-4b0c-98ba-305166627d10 to vmware_temp/595a6c38-888a-4504-8951-04f34d7ca9aa/575ba628-84b6-4b0c-98ba-305166627d10/tmp-sparse.vmdk on the data store datastore1 {{(pid=68217) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 568.757315] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Caching image {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 568.757560] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Copying Virtual Disk [datastore1] vmware_temp/595a6c38-888a-4504-8951-04f34d7ca9aa/575ba628-84b6-4b0c-98ba-305166627d10/tmp-sparse.vmdk to [datastore1] vmware_temp/595a6c38-888a-4504-8951-04f34d7ca9aa/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 568.757836] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c81c697-350d-4fa7-a4da-45920892fb10 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.769389] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 568.769389] env[68217]: value = "task-2960530" [ 568.769389] env[68217]: _type = "Task" [ 568.769389] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 568.779417] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960530, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 568.838529] env[68217]: DEBUG nova.policy [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91772cce5a7343d7bba596c00bc583f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a00814fe3eb4f1fa647f7876b11e86f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 569.035430] env[68217]: DEBUG nova.network.neutron [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Updating instance_info_cache with network_info: [{"id": "bfc8f377-ffbf-460c-bbb3-ae9af4f22fe8", "address": "fa:16:3e:4e:c1:22", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfc8f377-ff", "ovs_interfaceid": "bfc8f377-ffbf-460c-bbb3-ae9af4f22fe8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.104572] env[68217]: DEBUG nova.compute.manager [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 569.251653] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Acquiring lock "af11d05f-4432-4505-bb52-226414488960" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 569.251706] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Lock "af11d05f-4432-4505-bb52-226414488960" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 569.283369] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960530, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.403830] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac7925c-39b4-4e5f-9ab7-b58f8764799a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.414652] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3503601b-f888-40ae-a91a-7ba45f9157b3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.452933] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1bf0938-d948-430b-8746-b6d514d3e49d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.462338] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf70e7ed-80d1-425d-81b8-3b5425924c03 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.470084] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "93b49e91-5e9a-4b11-a833-31ab0883e0e8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 569.470343] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "93b49e91-5e9a-4b11-a833-31ab0883e0e8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 569.484583] env[68217]: DEBUG nova.compute.provider_tree [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 
tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 569.539628] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Releasing lock "refresh_cache-71dd4921-5859-421f-9e31-e9800adc9e3c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 569.539955] env[68217]: DEBUG nova.compute.manager [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Instance network_info: |[{"id": "bfc8f377-ffbf-460c-bbb3-ae9af4f22fe8", "address": "fa:16:3e:4e:c1:22", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfc8f377-ff", "ovs_interfaceid": "bfc8f377-ffbf-460c-bbb3-ae9af4f22fe8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 569.540724] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:c1:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bfc8f377-ffbf-460c-bbb3-ae9af4f22fe8', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 569.547945] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Creating folder: Project (11e2667aafee473fa1aab9bfe6377931). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 569.548484] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3320531e-aa73-4501-b8e8-8b46deb6691a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.559537] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Created folder: Project (11e2667aafee473fa1aab9bfe6377931) in parent group-v594094. [ 569.559987] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Creating folder: Instances. Parent ref: group-v594101. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 569.559987] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-725b6e67-4ac1-490d-9f47-2de2a265e11f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.570105] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Created folder: Instances in parent group-v594101. [ 569.570336] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 569.570518] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 569.570722] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3f9b6a75-14b4-44ee-b772-b25a2b0a70cf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.592200] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 569.592200] env[68217]: value = "task-2960533" [ 569.592200] env[68217]: _type = "Task" [ 569.592200] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.601024] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960533, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.781686] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960530, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.689404} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 569.781945] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Copied Virtual Disk [datastore1] vmware_temp/595a6c38-888a-4504-8951-04f34d7ca9aa/575ba628-84b6-4b0c-98ba-305166627d10/tmp-sparse.vmdk to [datastore1] vmware_temp/595a6c38-888a-4504-8951-04f34d7ca9aa/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 569.782136] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Deleting the datastore file [datastore1] vmware_temp/595a6c38-888a-4504-8951-04f34d7ca9aa/575ba628-84b6-4b0c-98ba-305166627d10/tmp-sparse.vmdk {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 569.782462] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c076e14-d556-4aca-8436-b1adf614fbaa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.788391] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 569.788391] env[68217]: value = "task-2960534" [ 569.788391] env[68217]: _type = "Task" [ 569.788391] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 569.797566] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960534, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 569.957936] env[68217]: DEBUG nova.network.neutron [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Successfully updated port: 446b7b15-f602-4a41-b415-19e2cff8535a {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 569.988387] env[68217]: DEBUG nova.scheduler.client.report [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 570.057474] env[68217]: DEBUG nova.network.neutron [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Successfully created port: 852ba444-6eea-4b2f-bbd8-58cdde27ee66 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 570.061871] env[68217]: DEBUG nova.network.neutron [req-6010ba4f-38e2-42cb-8ca9-d6f9bd35223a req-7001eba6-233a-43b4-ad1e-911e0da56027 service nova] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Updated VIF entry in instance network info cache for port 87dc0f7f-05f9-47ef-a033-652d923dccac. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 570.062301] env[68217]: DEBUG nova.network.neutron [req-6010ba4f-38e2-42cb-8ca9-d6f9bd35223a req-7001eba6-233a-43b4-ad1e-911e0da56027 service nova] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Updating instance_info_cache with network_info: [{"id": "87dc0f7f-05f9-47ef-a033-652d923dccac", "address": "fa:16:3e:8d:9c:f4", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.98", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87dc0f7f-05", "ovs_interfaceid": "87dc0f7f-05f9-47ef-a033-652d923dccac", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.090326] env[68217]: DEBUG nova.compute.manager [req-d288860b-0a81-4fa9-9ec4-fc04b5095615 req-9f7a7a72-545b-4623-8453-59558a242730 service nova] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Received event network-changed-d2eec7fc-f623-4a8e-aee2-762e1eb58cf7 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 570.090505] env[68217]: DEBUG nova.compute.manager [req-d288860b-0a81-4fa9-9ec4-fc04b5095615 req-9f7a7a72-545b-4623-8453-59558a242730 service nova] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Refreshing instance network info cache due to event network-changed-d2eec7fc-f623-4a8e-aee2-762e1eb58cf7. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 570.090707] env[68217]: DEBUG oslo_concurrency.lockutils [req-d288860b-0a81-4fa9-9ec4-fc04b5095615 req-9f7a7a72-545b-4623-8453-59558a242730 service nova] Acquiring lock "refresh_cache-11f9c054-62b9-4ac9-9651-5c85e7a86663" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.090845] env[68217]: DEBUG oslo_concurrency.lockutils [req-d288860b-0a81-4fa9-9ec4-fc04b5095615 req-9f7a7a72-545b-4623-8453-59558a242730 service nova] Acquired lock "refresh_cache-11f9c054-62b9-4ac9-9651-5c85e7a86663" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 570.091008] env[68217]: DEBUG nova.network.neutron [req-d288860b-0a81-4fa9-9ec4-fc04b5095615 req-9f7a7a72-545b-4623-8453-59558a242730 service nova] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Refreshing network info cache for port d2eec7fc-f623-4a8e-aee2-762e1eb58cf7 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 570.103976] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960533, 'name': CreateVM_Task, 'duration_secs': 0.36992} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.104231] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 570.104829] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.105080] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 570.105298] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 570.105537] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c98611bd-0c3e-4f3a-a965-fcbe0e0a7ab5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.111867] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Waiting for the task: (returnval){ [ 570.111867] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52662618-e62d-c967-4ece-1fe89f970720" [ 570.111867] env[68217]: _type = "Task" [ 570.111867] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.117652] env[68217]: DEBUG nova.compute.manager [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 570.123127] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52662618-e62d-c967-4ece-1fe89f970720, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.154540] env[68217]: DEBUG nova.virt.hardware [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 570.155293] env[68217]: DEBUG nova.virt.hardware [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 570.156550] env[68217]: DEBUG nova.virt.hardware [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 570.156550] env[68217]: DEBUG nova.virt.hardware [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 570.156550] env[68217]: DEBUG nova.virt.hardware [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 570.156550] env[68217]: DEBUG nova.virt.hardware [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 570.156550] env[68217]: DEBUG nova.virt.hardware [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 570.156955] env[68217]: DEBUG nova.virt.hardware [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 570.157275] env[68217]: DEBUG nova.virt.hardware [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 570.158362] env[68217]: DEBUG nova.virt.hardware [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 570.158981] env[68217]: DEBUG nova.virt.hardware [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 570.161484] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-760e7dd2-08c6-4e76-af72-d049b5a497ee {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.172007] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4b286e9-5067-4d90-b19d-19aab46fe993 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.298961] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960534, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.026042} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.299248] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 570.299449] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Moving file from [datastore1] vmware_temp/595a6c38-888a-4504-8951-04f34d7ca9aa/575ba628-84b6-4b0c-98ba-305166627d10 to [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10. {{(pid=68217) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 570.299737] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-e40f01c7-535a-4606-9d8b-bc9edd61b5bb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.308094] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 570.308094] env[68217]: value = "task-2960535" [ 570.308094] env[68217]: _type = "Task" [ 570.308094] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.317741] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960535, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 570.462911] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquiring lock "refresh_cache-471e8a27-ed87-461a-b817-cd5ad208dd10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.462911] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquired lock "refresh_cache-471e8a27-ed87-461a-b817-cd5ad208dd10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 570.463043] env[68217]: DEBUG nova.network.neutron [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 570.495227] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.408s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 570.495665] env[68217]: DEBUG nova.compute.manager [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 570.499530] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.269s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 570.501254] env[68217]: INFO nova.compute.claims [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 570.566277] env[68217]: DEBUG oslo_concurrency.lockutils [req-6010ba4f-38e2-42cb-8ca9-d6f9bd35223a req-7001eba6-233a-43b4-ad1e-911e0da56027 service nova] Releasing lock "refresh_cache-cdc84742-e20a-4e48-bfff-b3ac34405c1d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 570.628225] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 570.628466] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 570.628673] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.821051] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960535, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.028097} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 570.824891] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] File moved {{(pid=68217) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 570.824891] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Cleaning up location [datastore1] vmware_temp/595a6c38-888a-4504-8951-04f34d7ca9aa {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 570.824891] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Deleting the datastore file [datastore1] vmware_temp/595a6c38-888a-4504-8951-04f34d7ca9aa {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 570.824891] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2bccbd41-abd4-45ca-a434-3bd1514b6efc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 570.829153] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 570.829153] env[68217]: value = "task-2960536" [ 570.829153] env[68217]: _type = "Task" [ 570.829153] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 570.841785] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960536, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.005322] env[68217]: DEBUG nova.compute.utils [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 571.007290] env[68217]: DEBUG nova.compute.manager [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 571.009705] env[68217]: DEBUG nova.network.neutron [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 571.044693] env[68217]: DEBUG nova.network.neutron [req-d288860b-0a81-4fa9-9ec4-fc04b5095615 req-9f7a7a72-545b-4623-8453-59558a242730 service nova] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Updated VIF entry in instance network info cache for port d2eec7fc-f623-4a8e-aee2-762e1eb58cf7. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 571.045042] env[68217]: DEBUG nova.network.neutron [req-d288860b-0a81-4fa9-9ec4-fc04b5095615 req-9f7a7a72-545b-4623-8453-59558a242730 service nova] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Updating instance_info_cache with network_info: [{"id": "d2eec7fc-f623-4a8e-aee2-762e1eb58cf7", "address": "fa:16:3e:90:07:ac", "network": {"id": "3f8eb120-19fa-420f-a14b-3cf960a6fe58", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1021764887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cff35c33460c4a50ae6bee636d950504", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2eec7fc-f6", "ovs_interfaceid": "d2eec7fc-f623-4a8e-aee2-762e1eb58cf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.074674] env[68217]: DEBUG nova.network.neutron [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 571.099426] env[68217]: DEBUG nova.policy [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6f1b70f89609479da281cd2894797125', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '29ed3216c24841d0859cdc543f5b12bf', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 571.343205] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960536, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025246} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.343205] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 571.343800] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ffd9e902-7f36-431f-a69c-c14e0d28a8e9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.349349] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 571.349349] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52090b9f-86e1-d671-307d-63c601e9472d" [ 571.349349] env[68217]: _type = "Task" [ 571.349349] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.359222] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52090b9f-86e1-d671-307d-63c601e9472d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.516975] env[68217]: DEBUG nova.compute.manager [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 571.549868] env[68217]: DEBUG oslo_concurrency.lockutils [req-d288860b-0a81-4fa9-9ec4-fc04b5095615 req-9f7a7a72-545b-4623-8453-59558a242730 service nova] Releasing lock "refresh_cache-11f9c054-62b9-4ac9-9651-5c85e7a86663" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 571.600731] env[68217]: DEBUG nova.network.neutron [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Updating instance_info_cache with network_info: [{"id": "446b7b15-f602-4a41-b415-19e2cff8535a", "address": "fa:16:3e:48:dc:e0", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.207", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap446b7b15-f6", "ovs_interfaceid": "446b7b15-f602-4a41-b415-19e2cff8535a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.745654] env[68217]: DEBUG nova.network.neutron [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Successfully created port: 3f82f80c-ad2c-4e9a-a247-99c0d137f553 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 571.864066] env[68217]: DEBUG nova.compute.manager [req-92311694-722b-4fe7-a403-58e5c5b0cd79 req-cf5f8e47-4435-4cc3-9194-c115e58efcdc service nova] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Received event network-vif-plugged-bfc8f377-ffbf-460c-bbb3-ae9af4f22fe8 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 571.864285] env[68217]: DEBUG oslo_concurrency.lockutils [req-92311694-722b-4fe7-a403-58e5c5b0cd79 req-cf5f8e47-4435-4cc3-9194-c115e58efcdc service nova] Acquiring lock "71dd4921-5859-421f-9e31-e9800adc9e3c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 571.864480] env[68217]: DEBUG oslo_concurrency.lockutils [req-92311694-722b-4fe7-a403-58e5c5b0cd79 req-cf5f8e47-4435-4cc3-9194-c115e58efcdc service nova] Lock "71dd4921-5859-421f-9e31-e9800adc9e3c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 571.864888] env[68217]: DEBUG 
oslo_concurrency.lockutils [req-92311694-722b-4fe7-a403-58e5c5b0cd79 req-cf5f8e47-4435-4cc3-9194-c115e58efcdc service nova] Lock "71dd4921-5859-421f-9e31-e9800adc9e3c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 571.864888] env[68217]: DEBUG nova.compute.manager [req-92311694-722b-4fe7-a403-58e5c5b0cd79 req-cf5f8e47-4435-4cc3-9194-c115e58efcdc service nova] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] No waiting events found dispatching network-vif-plugged-bfc8f377-ffbf-460c-bbb3-ae9af4f22fe8 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 571.864993] env[68217]: WARNING nova.compute.manager [req-92311694-722b-4fe7-a403-58e5c5b0cd79 req-cf5f8e47-4435-4cc3-9194-c115e58efcdc service nova] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Received unexpected event network-vif-plugged-bfc8f377-ffbf-460c-bbb3-ae9af4f22fe8 for instance with vm_state building and task_state spawning. [ 571.865739] env[68217]: DEBUG nova.compute.manager [req-92311694-722b-4fe7-a403-58e5c5b0cd79 req-cf5f8e47-4435-4cc3-9194-c115e58efcdc service nova] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Received event network-changed-bfc8f377-ffbf-460c-bbb3-ae9af4f22fe8 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 571.865739] env[68217]: DEBUG nova.compute.manager [req-92311694-722b-4fe7-a403-58e5c5b0cd79 req-cf5f8e47-4435-4cc3-9194-c115e58efcdc service nova] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Refreshing instance network info cache due to event network-changed-bfc8f377-ffbf-460c-bbb3-ae9af4f22fe8. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 571.865739] env[68217]: DEBUG oslo_concurrency.lockutils [req-92311694-722b-4fe7-a403-58e5c5b0cd79 req-cf5f8e47-4435-4cc3-9194-c115e58efcdc service nova] Acquiring lock "refresh_cache-71dd4921-5859-421f-9e31-e9800adc9e3c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.865739] env[68217]: DEBUG oslo_concurrency.lockutils [req-92311694-722b-4fe7-a403-58e5c5b0cd79 req-cf5f8e47-4435-4cc3-9194-c115e58efcdc service nova] Acquired lock "refresh_cache-71dd4921-5859-421f-9e31-e9800adc9e3c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 571.865739] env[68217]: DEBUG nova.network.neutron [req-92311694-722b-4fe7-a403-58e5c5b0cd79 req-cf5f8e47-4435-4cc3-9194-c115e58efcdc service nova] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Refreshing network info cache for port bfc8f377-ffbf-460c-bbb3-ae9af4f22fe8 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 571.880056] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52090b9f-86e1-d671-307d-63c601e9472d, 'name': SearchDatastore_Task, 'duration_secs': 0.008757} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.883909] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 571.883909] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 11f9c054-62b9-4ac9-9651-5c85e7a86663/11f9c054-62b9-4ac9-9651-5c85e7a86663.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 571.883909] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-132af753-d3e4-4de6-9b43-365df05e4299 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.885532] env[68217]: DEBUG oslo_concurrency.lockutils [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 571.885802] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 571.886056] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6d171ee7-e1d1-4a12-9dae-16149f2c1825 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.888661] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f0d8398d-2ea4-4933-a94d-2a2686d4f337 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.897576] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b90c058-dc2d-43c5-bafd-3ff6cdedb6fe {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.904054] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 571.904054] env[68217]: value = "task-2960537" [ 571.904054] env[68217]: _type = "Task" [ 571.904054] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.942552] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e3383a2-8db5-4a0a-bef2-99ff26a64b55 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.945379] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 571.946016] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 571.949194] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6600962-7658-4e6c-8b5c-e6a96539aef7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.952624] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960537, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 571.964031] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25bb900-5b56-49a7-8855-754cdbd35e0a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.968742] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Waiting for the task: (returnval){ [ 571.968742] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522d6312-0073-e4c3-922a-9e55c78e4695" [ 571.968742] env[68217]: _type = "Task" [ 571.968742] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 571.981183] env[68217]: DEBUG nova.compute.provider_tree [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 571.989999] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522d6312-0073-e4c3-922a-9e55c78e4695, 'name': SearchDatastore_Task, 'duration_secs': 0.010595} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 571.991318] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba07d28e-bbcb-4054-a978-09385e26ced0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.997324] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Waiting for the task: (returnval){ [ 571.997324] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522668d0-4903-740c-bd8e-66866de30140" [ 571.997324] env[68217]: _type = "Task" [ 571.997324] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.006024] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522668d0-4903-740c-bd8e-66866de30140, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.109963] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Releasing lock "refresh_cache-471e8a27-ed87-461a-b817-cd5ad208dd10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 572.110849] env[68217]: DEBUG nova.compute.manager [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Instance network_info: |[{"id": "446b7b15-f602-4a41-b415-19e2cff8535a", "address": "fa:16:3e:48:dc:e0", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.207", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap446b7b15-f6", "ovs_interfaceid": "446b7b15-f602-4a41-b415-19e2cff8535a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 572.110959] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Instance VIF info [{'network_name': 'br-int', 
'mac_address': 'fa:16:3e:48:dc:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '446b7b15-f602-4a41-b415-19e2cff8535a', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 572.120474] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Creating folder: Project (74b280370d334d128f9ad30ed7bc2a9b). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 572.120777] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-218dca55-a31e-49f0-ba74-2c0ec35be530 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.133980] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Created folder: Project (74b280370d334d128f9ad30ed7bc2a9b) in parent group-v594094. [ 572.136019] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Creating folder: Instances. Parent ref: group-v594104. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 572.136019] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-86bdee03-202f-4c8e-af81-a8ff01daf341 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.146711] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Created folder: Instances in parent group-v594104. [ 572.147054] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 572.147285] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 572.147515] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-daa49de3-b27e-4d69-98f8-94d6d3b3198d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.176611] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 572.176611] env[68217]: value = "task-2960540" [ 572.176611] env[68217]: _type = "Task" [ 572.176611] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.190404] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960540, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.266762] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 572.266899] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 572.307057] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 572.307339] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 572.353826] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "63e0fc9e-5182-4781-b007-69e2134718df" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 572.353826] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "63e0fc9e-5182-4781-b007-69e2134718df" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 572.420969] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960537, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.484072] env[68217]: DEBUG nova.scheduler.client.report [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 572.507731] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522668d0-4903-740c-bd8e-66866de30140, 'name': SearchDatastore_Task, 'duration_secs': 0.009448} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.508724] env[68217]: DEBUG oslo_concurrency.lockutils [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 572.509082] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] cdc84742-e20a-4e48-bfff-b3ac34405c1d/cdc84742-e20a-4e48-bfff-b3ac34405c1d.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 572.509441] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 572.509662] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 572.510550] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-210f36bc-b277-448f-b261-16fce86dfee4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.513364] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f0336b4b-83ba-4192-a406-22cedde500be {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.523434] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Waiting for the task: (returnval){ [ 572.523434] env[68217]: value = "task-2960541" [ 572.523434] env[68217]: _type = "Task" [ 572.523434] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.524914] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 572.524914] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 572.530338] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed4e0fb6-e74b-4913-8d75-58cdad556ed6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.534891] env[68217]: DEBUG nova.compute.manager [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 572.542134] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Task: {'id': task-2960541, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.544491] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Waiting for the task: (returnval){ [ 572.544491] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5210beba-d930-a46b-d0af-df93c2b4350b" [ 572.544491] env[68217]: _type = "Task" [ 572.544491] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.551338] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5210beba-d930-a46b-d0af-df93c2b4350b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.577258] env[68217]: DEBUG nova.virt.hardware [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 572.577504] env[68217]: DEBUG nova.virt.hardware [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 572.577657] env[68217]: DEBUG nova.virt.hardware [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 572.577846] env[68217]: DEBUG nova.virt.hardware [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 572.577970] env[68217]: DEBUG nova.virt.hardware [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 572.578294] env[68217]: DEBUG nova.virt.hardware [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 572.578467] env[68217]: DEBUG nova.virt.hardware [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 572.578624] env[68217]: DEBUG nova.virt.hardware [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 572.578782] env[68217]: DEBUG nova.virt.hardware [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 572.578983] env[68217]: DEBUG nova.virt.hardware [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 572.579176] env[68217]: DEBUG nova.virt.hardware [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 572.580046] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33ec5a91-133a-4946-bf2b-b8e338fbdf03 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.589431] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255cf2d6-e156-4991-93f3-68d7881066ef {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.690164] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960540, 'name': CreateVM_Task, 'duration_secs': 0.398235} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.690164] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 572.690853] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 572.691032] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 572.691367] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 572.691878] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8beef407-8c2a-4ae6-8054-3fa04073dbc0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.698591] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for the task: (returnval){ [ 572.698591] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5245eb41-0ec2-f6bd-4327-3a8763a8c458" [ 572.698591] env[68217]: _type = "Task" [ 572.698591] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.715428] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5245eb41-0ec2-f6bd-4327-3a8763a8c458, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.920376] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960537, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.543449} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 572.920636] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 11f9c054-62b9-4ac9-9651-5c85e7a86663/11f9c054-62b9-4ac9-9651-5c85e7a86663.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 572.922691] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 572.922691] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2ce93d33-9780-4192-bc7b-fc197a233976 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.932303] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 572.932303] env[68217]: value = "task-2960542" [ 572.932303] env[68217]: _type = "Task" [ 572.932303] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 572.951538] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960542, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 572.994585] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.495s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 572.995387] env[68217]: DEBUG nova.compute.manager [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 573.002252] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.743s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 573.004652] env[68217]: INFO nova.compute.claims [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 573.026205] env[68217]: DEBUG nova.network.neutron [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Successfully updated port: 852ba444-6eea-4b2f-bbd8-58cdde27ee66 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 573.052020] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Task: {'id': task-2960541, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483667} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.053440] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] cdc84742-e20a-4e48-bfff-b3ac34405c1d/cdc84742-e20a-4e48-bfff-b3ac34405c1d.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 573.053656] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 573.055113] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a2a1114-5e53-411e-b062-895ce7fd4d65 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.086929] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Waiting for the task: (returnval){ [ 573.086929] env[68217]: value = "task-2960543" [ 573.086929] env[68217]: _type = "Task" [ 573.086929] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.086929] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5210beba-d930-a46b-d0af-df93c2b4350b, 'name': SearchDatastore_Task, 'duration_secs': 0.019655} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.086929] env[68217]: DEBUG nova.network.neutron [req-92311694-722b-4fe7-a403-58e5c5b0cd79 req-cf5f8e47-4435-4cc3-9194-c115e58efcdc service nova] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Updated VIF entry in instance network info cache for port bfc8f377-ffbf-460c-bbb3-ae9af4f22fe8. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 573.108169] env[68217]: DEBUG nova.network.neutron [req-92311694-722b-4fe7-a403-58e5c5b0cd79 req-cf5f8e47-4435-4cc3-9194-c115e58efcdc service nova] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Updating instance_info_cache with network_info: [{"id": "bfc8f377-ffbf-460c-bbb3-ae9af4f22fe8", "address": "fa:16:3e:4e:c1:22", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.51", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfc8f377-ff", "ovs_interfaceid": "bfc8f377-ffbf-460c-bbb3-ae9af4f22fe8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 573.108169] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c59bdc28-45b7-4e29-8d4c-70393dbabfcd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.109617] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Task: {'id': task-2960543, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.109617] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Waiting for the task: (returnval){ [ 573.109617] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5229b0ec-5849-9ef1-cb63-10abe2acf027" [ 573.109617] env[68217]: _type = "Task" [ 573.109617] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.109617] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5229b0ec-5849-9ef1-cb63-10abe2acf027, 'name': SearchDatastore_Task, 'duration_secs': 0.008243} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.109617] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 573.109878] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 71dd4921-5859-421f-9e31-e9800adc9e3c/71dd4921-5859-421f-9e31-e9800adc9e3c.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 573.109878] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bd8f2237-3540-4515-a0e7-4ed1483a4855 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.111214] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Waiting for the task: (returnval){ [ 573.111214] env[68217]: value = "task-2960544" [ 573.111214] env[68217]: _type = "Task" [ 573.111214] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.120241] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': task-2960544, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.212138] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5245eb41-0ec2-f6bd-4327-3a8763a8c458, 'name': SearchDatastore_Task, 'duration_secs': 0.065611} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.212468] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 573.212700] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 573.212897] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.213201] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 573.213905] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 573.213905] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef17befd-1c44-44df-8ade-6d23ccfc4a6c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.222047] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 573.222859] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 573.223106] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa5fb731-8299-45d3-a836-baa647895a17 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.230758] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for the task: (returnval){ [ 573.230758] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fd4a5a-6cae-f67c-b69e-01e8a8d9364a" [ 573.230758] env[68217]: _type = "Task" [ 573.230758] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.240779] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fd4a5a-6cae-f67c-b69e-01e8a8d9364a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.446517] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960542, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.093278} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.448068] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 573.448068] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb060f5f-438d-4f82-a5c4-8700ff4a7a7a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.482535] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Reconfiguring VM instance instance-00000002 to attach disk [datastore1] 11f9c054-62b9-4ac9-9651-5c85e7a86663/11f9c054-62b9-4ac9-9651-5c85e7a86663.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 573.482535] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0d7f3630-71c5-4b1b-9817-2d5cc169e3e7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.515042] env[68217]: DEBUG nova.compute.utils [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 573.519140] env[68217]: DEBUG 
oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 573.519140] env[68217]: value = "task-2960545" [ 573.519140] env[68217]: _type = "Task" [ 573.519140] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.520955] env[68217]: DEBUG nova.compute.manager [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 573.521836] env[68217]: DEBUG nova.network.neutron [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 573.524970] env[68217]: DEBUG nova.compute.manager [req-aa45ec6d-311e-49d3-9c4c-49f1b81b4b25 req-2e8e82f8-22fa-40f1-8cd4-3ba55c791b92 service nova] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Received event network-vif-plugged-446b7b15-f602-4a41-b415-19e2cff8535a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 573.525860] env[68217]: DEBUG oslo_concurrency.lockutils [req-aa45ec6d-311e-49d3-9c4c-49f1b81b4b25 req-2e8e82f8-22fa-40f1-8cd4-3ba55c791b92 service nova] Acquiring lock "471e8a27-ed87-461a-b817-cd5ad208dd10-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 573.525860] env[68217]: DEBUG oslo_concurrency.lockutils [req-aa45ec6d-311e-49d3-9c4c-49f1b81b4b25 req-2e8e82f8-22fa-40f1-8cd4-3ba55c791b92 service nova] Lock "471e8a27-ed87-461a-b817-cd5ad208dd10-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 573.525860] env[68217]: DEBUG oslo_concurrency.lockutils [req-aa45ec6d-311e-49d3-9c4c-49f1b81b4b25 req-2e8e82f8-22fa-40f1-8cd4-3ba55c791b92 service nova] Lock "471e8a27-ed87-461a-b817-cd5ad208dd10-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 573.525860] env[68217]: DEBUG nova.compute.manager [req-aa45ec6d-311e-49d3-9c4c-49f1b81b4b25 req-2e8e82f8-22fa-40f1-8cd4-3ba55c791b92 service nova] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] No waiting events found dispatching network-vif-plugged-446b7b15-f602-4a41-b415-19e2cff8535a {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 573.526196] env[68217]: WARNING nova.compute.manager [req-aa45ec6d-311e-49d3-9c4c-49f1b81b4b25 req-2e8e82f8-22fa-40f1-8cd4-3ba55c791b92 service nova] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Received unexpected event network-vif-plugged-446b7b15-f602-4a41-b415-19e2cff8535a for instance with vm_state building and task_state spawning. 
[ 573.526196] env[68217]: DEBUG nova.compute.manager [req-aa45ec6d-311e-49d3-9c4c-49f1b81b4b25 req-2e8e82f8-22fa-40f1-8cd4-3ba55c791b92 service nova] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Received event network-changed-446b7b15-f602-4a41-b415-19e2cff8535a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 573.526346] env[68217]: DEBUG nova.compute.manager [req-aa45ec6d-311e-49d3-9c4c-49f1b81b4b25 req-2e8e82f8-22fa-40f1-8cd4-3ba55c791b92 service nova] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Refreshing instance network info cache due to event network-changed-446b7b15-f602-4a41-b415-19e2cff8535a. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 573.526547] env[68217]: DEBUG oslo_concurrency.lockutils [req-aa45ec6d-311e-49d3-9c4c-49f1b81b4b25 req-2e8e82f8-22fa-40f1-8cd4-3ba55c791b92 service nova] Acquiring lock "refresh_cache-471e8a27-ed87-461a-b817-cd5ad208dd10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.526678] env[68217]: DEBUG oslo_concurrency.lockutils [req-aa45ec6d-311e-49d3-9c4c-49f1b81b4b25 req-2e8e82f8-22fa-40f1-8cd4-3ba55c791b92 service nova] Acquired lock "refresh_cache-471e8a27-ed87-461a-b817-cd5ad208dd10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 573.526923] env[68217]: DEBUG nova.network.neutron [req-aa45ec6d-311e-49d3-9c4c-49f1b81b4b25 req-2e8e82f8-22fa-40f1-8cd4-3ba55c791b92 service nova] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Refreshing network info cache for port 446b7b15-f602-4a41-b415-19e2cff8535a {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 573.545150] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquiring lock "refresh_cache-4f4dc254-8e4f-4c5f-a2a8-eef6230825c6" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 573.545307] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquired lock "refresh_cache-4f4dc254-8e4f-4c5f-a2a8-eef6230825c6" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 573.545463] env[68217]: DEBUG nova.network.neutron [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 573.547019] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960545, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.584051] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Task: {'id': task-2960543, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06887} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.584547] env[68217]: DEBUG oslo_concurrency.lockutils [req-92311694-722b-4fe7-a403-58e5c5b0cd79 req-cf5f8e47-4435-4cc3-9194-c115e58efcdc service nova] Releasing lock "refresh_cache-71dd4921-5859-421f-9e31-e9800adc9e3c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 573.584890] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 573.585740] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87b8478c-7f4a-4603-bc53-b1746a02dd56 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.615259] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Reconfiguring VM instance instance-00000001 to attach disk [datastore1] cdc84742-e20a-4e48-bfff-b3ac34405c1d/cdc84742-e20a-4e48-bfff-b3ac34405c1d.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 573.615698] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9629a1c4-5e8b-4ea7-9302-b88224604fdc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.645389] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': task-2960544, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479843} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.646747] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 71dd4921-5859-421f-9e31-e9800adc9e3c/71dd4921-5859-421f-9e31-e9800adc9e3c.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 573.647038] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 573.647384] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Waiting for the task: (returnval){ [ 573.647384] env[68217]: value = "task-2960546" [ 573.647384] env[68217]: _type = "Task" [ 573.647384] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.647598] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4a73f12d-68e0-4fe7-a3b5-c063c1b355cb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.658506] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Task: {'id': task-2960546, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.659723] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Waiting for the task: (returnval){ [ 573.659723] env[68217]: value = "task-2960547" [ 573.659723] env[68217]: _type = "Task" [ 573.659723] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.667104] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': task-2960547, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 573.669272] env[68217]: DEBUG nova.policy [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1cbcf73e36ca4583b53b9c0c2cfd0e3f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cff35c33460c4a50ae6bee636d950504', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 573.741277] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fd4a5a-6cae-f67c-b69e-01e8a8d9364a, 'name': SearchDatastore_Task, 'duration_secs': 0.008481} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 573.742057] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb5fec65-4401-4cff-9638-d59596f3c587 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.746823] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for the task: (returnval){ [ 573.746823] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]523438c0-de62-ad8e-6b1a-f8f7f354dc14" [ 573.746823] env[68217]: _type = "Task" [ 573.746823] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 573.754411] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523438c0-de62-ad8e-6b1a-f8f7f354dc14, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.026288] env[68217]: DEBUG nova.compute.manager [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 574.042128] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960545, 'name': ReconfigVM_Task, 'duration_secs': 0.311072} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.043256] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Reconfigured VM instance instance-00000002 to attach disk [datastore1] 11f9c054-62b9-4ac9-9651-5c85e7a86663/11f9c054-62b9-4ac9-9651-5c85e7a86663.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 574.043803] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d6784c6e-7442-40e7-a214-1baa73e2eaf1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.053952] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 574.053952] env[68217]: value = "task-2960548" [ 574.053952] env[68217]: _type = "Task" [ 574.053952] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.068905] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960548, 'name': Rename_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.158773] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Task: {'id': task-2960546, 'name': ReconfigVM_Task, 'duration_secs': 0.306346} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.159123] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Reconfigured VM instance instance-00000001 to attach disk [datastore1] cdc84742-e20a-4e48-bfff-b3ac34405c1d/cdc84742-e20a-4e48-bfff-b3ac34405c1d.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 574.160080] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a8e44678-5f27-4c6e-be6a-38c9ba7e8b19 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.176040] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': task-2960547, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068557} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.177785] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 574.178333] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Waiting for the task: (returnval){ [ 574.178333] env[68217]: value = "task-2960549" [ 574.178333] env[68217]: _type = "Task" [ 574.178333] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.179047] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa05688e-db08-4442-a28b-b4e1e2bce8b3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.215914] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Reconfiguring VM instance instance-00000003 to attach disk [datastore1] 71dd4921-5859-421f-9e31-e9800adc9e3c/71dd4921-5859-421f-9e31-e9800adc9e3c.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 574.216745] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Task: {'id': task-2960549, 'name': Rename_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.217273] env[68217]: DEBUG nova.network.neutron [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 574.219887] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eeb38a21-7361-47a5-b793-1d26af1c11c5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.247550] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Waiting for the task: (returnval){ [ 574.247550] env[68217]: value = "task-2960550" [ 574.247550] env[68217]: _type = "Task" [ 574.247550] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.263222] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523438c0-de62-ad8e-6b1a-f8f7f354dc14, 'name': SearchDatastore_Task, 'duration_secs': 0.008621} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.266488] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 574.266697] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 471e8a27-ed87-461a-b817-cd5ad208dd10/471e8a27-ed87-461a-b817-cd5ad208dd10.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 574.266867] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': task-2960550, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.267148] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-88b11776-0aa0-471b-badc-f7e752d31370 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.277230] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for the task: (returnval){ [ 574.277230] env[68217]: value = "task-2960551" [ 574.277230] env[68217]: _type = "Task" [ 574.277230] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.285617] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960551, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.352927] env[68217]: DEBUG nova.network.neutron [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Successfully created port: be83d87a-5096-4216-a402-35655f496cc0 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 574.389637] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f436999-04b4-4482-82c8-3f1a49c15ccb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.399890] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-578689a7-61db-4419-9038-790e542f6a20 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.435302] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1436d5f8-5a92-41de-a30d-85c40e2886ea {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.443815] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86de877e-d60e-4d38-b83c-18d2dc28639e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.465724] env[68217]: DEBUG nova.compute.provider_tree [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 574.568024] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960548, 'name': Rename_Task, 'duration_secs': 0.140857} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.568024] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 574.568024] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cf135fe7-7f3e-4958-b881-3a97fe12cb17 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.574137] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 574.574137] env[68217]: value = "task-2960552" [ 574.574137] env[68217]: _type = "Task" [ 574.574137] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.583673] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960552, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.694198] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Task: {'id': task-2960549, 'name': Rename_Task, 'duration_secs': 0.14452} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.694929] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 574.695364] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ed612ba8-60ae-4a5e-9c65-d2da2525f6e8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.705018] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Waiting for the task: (returnval){ [ 574.705018] env[68217]: value = "task-2960553" [ 574.705018] env[68217]: _type = "Task" [ 574.705018] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.712495] env[68217]: DEBUG oslo_vmware.api [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Task: {'id': task-2960553, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.766530] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': task-2960550, 'name': ReconfigVM_Task, 'duration_secs': 0.365245} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.767234] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Reconfigured VM instance instance-00000003 to attach disk [datastore1] 71dd4921-5859-421f-9e31-e9800adc9e3c/71dd4921-5859-421f-9e31-e9800adc9e3c.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 574.768105] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ea38e328-f37b-45d6-808e-33924c405cd7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.774914] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Waiting for the task: (returnval){ [ 574.774914] env[68217]: value = "task-2960554" [ 574.774914] env[68217]: _type = "Task" [ 574.774914] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.795382] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': task-2960554, 'name': Rename_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.800364] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960551, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.494816} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 574.800720] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 471e8a27-ed87-461a-b817-cd5ad208dd10/471e8a27-ed87-461a-b817-cd5ad208dd10.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 574.800985] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 574.801350] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-78ba089e-44b3-4bbb-a03a-6707686f0935 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 574.811079] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for the task: (returnval){ [ 574.811079] env[68217]: value = "task-2960555" [ 574.811079] env[68217]: _type = "Task" [ 574.811079] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 574.821591] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960555, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 574.969510] env[68217]: DEBUG nova.scheduler.client.report [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 575.041438] env[68217]: DEBUG nova.compute.manager [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 575.071387] env[68217]: DEBUG nova.virt.hardware [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 575.071630] env[68217]: DEBUG nova.virt.hardware [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 575.071781] env[68217]: DEBUG nova.virt.hardware [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 575.071957] env[68217]: DEBUG nova.virt.hardware [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 575.072141] env[68217]: DEBUG nova.virt.hardware [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 575.072305] env[68217]: DEBUG nova.virt.hardware [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 575.072485] env[68217]: DEBUG nova.virt.hardware [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 575.073011] env[68217]: DEBUG nova.virt.hardware [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 575.073216] 
env[68217]: DEBUG nova.virt.hardware [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 575.073385] env[68217]: DEBUG nova.virt.hardware [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 575.073561] env[68217]: DEBUG nova.virt.hardware [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 575.074573] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646f080f-750b-46a2-8d0f-9a3521e9d447 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.088071] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960552, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.092170] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2534d11-3ce6-4e03-85e8-6278a37fa84b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.168110] env[68217]: DEBUG nova.network.neutron [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Updating instance_info_cache with network_info: [{"id": "852ba444-6eea-4b2f-bbd8-58cdde27ee66", "address": "fa:16:3e:eb:f4:03", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap852ba444-6e", "ovs_interfaceid": "852ba444-6eea-4b2f-bbd8-58cdde27ee66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.216139] env[68217]: DEBUG oslo_vmware.api [None 
req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Task: {'id': task-2960553, 'name': PowerOnVM_Task, 'duration_secs': 0.493909} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.216465] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 575.216939] env[68217]: INFO nova.compute.manager [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Took 14.36 seconds to spawn the instance on the hypervisor. [ 575.219238] env[68217]: DEBUG nova.compute.manager [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 575.219238] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4200a70c-3c42-4cf3-80dc-27cb403c8463 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.288723] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': task-2960554, 'name': Rename_Task, 'duration_secs': 0.168889} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.289062] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 575.289684] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e9e7a28-f2d6-458a-83a9-bd02646d3827 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.297029] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Waiting for the task: (returnval){ [ 575.297029] env[68217]: value = "task-2960556" [ 575.297029] env[68217]: _type = "Task" [ 575.297029] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.306011] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': task-2960556, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.316363] env[68217]: DEBUG nova.network.neutron [req-aa45ec6d-311e-49d3-9c4c-49f1b81b4b25 req-2e8e82f8-22fa-40f1-8cd4-3ba55c791b92 service nova] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Updated VIF entry in instance network info cache for port 446b7b15-f602-4a41-b415-19e2cff8535a. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 575.316363] env[68217]: DEBUG nova.network.neutron [req-aa45ec6d-311e-49d3-9c4c-49f1b81b4b25 req-2e8e82f8-22fa-40f1-8cd4-3ba55c791b92 service nova] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Updating instance_info_cache with network_info: [{"id": "446b7b15-f602-4a41-b415-19e2cff8535a", "address": "fa:16:3e:48:dc:e0", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.207", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap446b7b15-f6", "ovs_interfaceid": "446b7b15-f602-4a41-b415-19e2cff8535a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 575.330177] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960555, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074947} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.330651] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 575.331698] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad241e5b-c28b-49da-8b3b-d360b97dfc4d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.359541] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Reconfiguring VM instance instance-00000004 to attach disk [datastore1] 471e8a27-ed87-461a-b817-cd5ad208dd10/471e8a27-ed87-461a-b817-cd5ad208dd10.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 575.360794] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-396582da-3a47-4def-b211-bdeff975fbc0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.382685] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for the task: (returnval){ [ 575.382685] env[68217]: value = "task-2960557" [ 575.382685] env[68217]: _type = "Task" [ 575.382685] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.391477] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960557, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.482483] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.480s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 575.482894] env[68217]: DEBUG nova.compute.manager [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 575.487268] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.204s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 575.489436] env[68217]: INFO nova.compute.claims [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 575.506261] env[68217]: DEBUG nova.network.neutron [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Successfully updated port: 3f82f80c-ad2c-4e9a-a247-99c0d137f553 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 575.555124] env[68217]: DEBUG oslo_concurrency.lockutils [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Acquiring lock "678acc61-1c94-4152-b4e8-7569ab169ab9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 575.555124] env[68217]: DEBUG oslo_concurrency.lockutils [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Lock "678acc61-1c94-4152-b4e8-7569ab169ab9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 575.588970] env[68217]: DEBUG oslo_vmware.api [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960552, 'name': PowerOnVM_Task, 'duration_secs': 0.514463} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 575.590364] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 575.590673] env[68217]: INFO nova.compute.manager [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Took 12.43 seconds to spawn the instance on the hypervisor. 
[ 575.590734] env[68217]: DEBUG nova.compute.manager [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 575.591617] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-745b174b-8be9-48a5-85f8-d4cf7cd7c5a9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.611385] env[68217]: DEBUG nova.compute.manager [req-a9eaceaf-aa4d-4516-8a98-37f4059f7d0f req-bee83ff4-2068-414d-af0e-28bb3147a010 service nova] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Received event network-vif-plugged-852ba444-6eea-4b2f-bbd8-58cdde27ee66 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 575.611638] env[68217]: DEBUG oslo_concurrency.lockutils [req-a9eaceaf-aa4d-4516-8a98-37f4059f7d0f req-bee83ff4-2068-414d-af0e-28bb3147a010 service nova] Acquiring lock "4f4dc254-8e4f-4c5f-a2a8-eef6230825c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 575.611791] env[68217]: DEBUG oslo_concurrency.lockutils [req-a9eaceaf-aa4d-4516-8a98-37f4059f7d0f req-bee83ff4-2068-414d-af0e-28bb3147a010 service nova] Lock "4f4dc254-8e4f-4c5f-a2a8-eef6230825c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 575.611952] env[68217]: DEBUG oslo_concurrency.lockutils [req-a9eaceaf-aa4d-4516-8a98-37f4059f7d0f req-bee83ff4-2068-414d-af0e-28bb3147a010 service nova] Lock "4f4dc254-8e4f-4c5f-a2a8-eef6230825c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 575.613130] env[68217]: DEBUG nova.compute.manager [req-a9eaceaf-aa4d-4516-8a98-37f4059f7d0f req-bee83ff4-2068-414d-af0e-28bb3147a010 service nova] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] No waiting events found dispatching network-vif-plugged-852ba444-6eea-4b2f-bbd8-58cdde27ee66 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 575.613402] env[68217]: WARNING nova.compute.manager [req-a9eaceaf-aa4d-4516-8a98-37f4059f7d0f req-bee83ff4-2068-414d-af0e-28bb3147a010 service nova] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Received unexpected event network-vif-plugged-852ba444-6eea-4b2f-bbd8-58cdde27ee66 for instance with vm_state building and task_state spawning. 
[ 575.613658] env[68217]: DEBUG nova.compute.manager [req-a9eaceaf-aa4d-4516-8a98-37f4059f7d0f req-bee83ff4-2068-414d-af0e-28bb3147a010 service nova] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Received event network-changed-852ba444-6eea-4b2f-bbd8-58cdde27ee66 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 575.613658] env[68217]: DEBUG nova.compute.manager [req-a9eaceaf-aa4d-4516-8a98-37f4059f7d0f req-bee83ff4-2068-414d-af0e-28bb3147a010 service nova] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Refreshing instance network info cache due to event network-changed-852ba444-6eea-4b2f-bbd8-58cdde27ee66. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 575.613857] env[68217]: DEBUG oslo_concurrency.lockutils [req-a9eaceaf-aa4d-4516-8a98-37f4059f7d0f req-bee83ff4-2068-414d-af0e-28bb3147a010 service nova] Acquiring lock "refresh_cache-4f4dc254-8e4f-4c5f-a2a8-eef6230825c6" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 575.671844] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Releasing lock "refresh_cache-4f4dc254-8e4f-4c5f-a2a8-eef6230825c6" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 575.671844] env[68217]: DEBUG nova.compute.manager [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Instance network_info: |[{"id": "852ba444-6eea-4b2f-bbd8-58cdde27ee66", "address": "fa:16:3e:eb:f4:03", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap852ba444-6e", "ovs_interfaceid": "852ba444-6eea-4b2f-bbd8-58cdde27ee66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 575.672027] env[68217]: DEBUG oslo_concurrency.lockutils [req-a9eaceaf-aa4d-4516-8a98-37f4059f7d0f req-bee83ff4-2068-414d-af0e-28bb3147a010 service nova] Acquired lock "refresh_cache-4f4dc254-8e4f-4c5f-a2a8-eef6230825c6" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 575.672027] env[68217]: DEBUG nova.network.neutron [req-a9eaceaf-aa4d-4516-8a98-37f4059f7d0f req-bee83ff4-2068-414d-af0e-28bb3147a010 service nova] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Refreshing network info cache for port 
852ba444-6eea-4b2f-bbd8-58cdde27ee66 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 575.673280] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:eb:f4:03', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '852ba444-6eea-4b2f-bbd8-58cdde27ee66', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 575.681284] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Creating folder: Project (0a00814fe3eb4f1fa647f7876b11e86f). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 575.682504] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d15b0ed2-c5e1-403f-a8d2-091b5d24267b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.694364] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Created folder: Project (0a00814fe3eb4f1fa647f7876b11e86f) in parent group-v594094. [ 575.694560] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Creating folder: Instances. Parent ref: group-v594107. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 575.695444] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-146c841d-101b-4d3c-ad2f-827ef129d5a8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.704751] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Created folder: Instances in parent group-v594107. [ 575.704924] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 575.705148] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 575.705359] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-73d4de75-9743-4b6f-8bc0-a05d2f4f570a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 575.725271] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 575.725271] env[68217]: value = "task-2960560" [ 575.725271] env[68217]: _type = "Task" [ 575.725271] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 575.747244] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960560, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.747244] env[68217]: INFO nova.compute.manager [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Took 19.15 seconds to build instance. [ 575.807435] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': task-2960556, 'name': PowerOnVM_Task} progress is 1%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 575.819348] env[68217]: DEBUG oslo_concurrency.lockutils [req-aa45ec6d-311e-49d3-9c4c-49f1b81b4b25 req-2e8e82f8-22fa-40f1-8cd4-3ba55c791b92 service nova] Releasing lock "refresh_cache-471e8a27-ed87-461a-b817-cd5ad208dd10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 575.895414] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960557, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.000826] env[68217]: DEBUG nova.compute.utils [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 576.006423] env[68217]: DEBUG nova.compute.manager [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 576.006785] env[68217]: DEBUG nova.network.neutron [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 576.009375] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Acquiring lock "refresh_cache-7056fb29-2a2f-4275-a411-4d5f3fcb421f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.009520] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Acquired lock "refresh_cache-7056fb29-2a2f-4275-a411-4d5f3fcb421f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 576.009659] env[68217]: DEBUG nova.network.neutron [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 576.115355] env[68217]: INFO nova.compute.manager [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Took 19.52 seconds to build instance. [ 576.239494] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960560, 'name': CreateVM_Task, 'duration_secs': 0.493069} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.239608] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 576.240894] env[68217]: DEBUG oslo_vmware.service [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f75e6d1-e427-4e2c-8dd8-3f0c2fbd0d2c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.249800] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.249800] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 576.250163] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 576.250568] env[68217]: DEBUG oslo_concurrency.lockutils [None req-37007b3d-4722-4e0a-8d35-6930256744e1 tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Lock "cdc84742-e20a-4e48-bfff-b3ac34405c1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.673s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 576.250753] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d214569-c0d2-4b19-a908-379f4cb74dd0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.253575] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "cdc84742-e20a-4e48-bfff-b3ac34405c1d" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 12.682s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 576.254712] env[68217]: INFO nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 576.254918] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "cdc84742-e20a-4e48-bfff-b3ac34405c1d" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 576.259541] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 576.259541] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5218f136-8fc2-4a6c-965b-25ae62b31317" [ 576.259541] env[68217]: _type = "Task" [ 576.259541] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.270934] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5218f136-8fc2-4a6c-965b-25ae62b31317, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.312956] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': task-2960556, 'name': PowerOnVM_Task} progress is 64%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.349393] env[68217]: DEBUG nova.policy [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'bf20ad076e4b460f917db1f3bda6a02c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd957b78fc55e42ca8abb7817c82d9db7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 576.397537] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960557, 'name': ReconfigVM_Task, 'duration_secs': 0.625537} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.398060] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Reconfigured VM instance instance-00000004 to attach disk [datastore1] 471e8a27-ed87-461a-b817-cd5ad208dd10/471e8a27-ed87-461a-b817-cd5ad208dd10.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 576.399199] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fc2f2a8b-1e79-4b42-b7fb-decc6d28d816 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.409057] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for the task: (returnval){ [ 576.409057] env[68217]: value = "task-2960561" [ 576.409057] env[68217]: _type = "Task" [ 576.409057] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.426134] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960561, 'name': Rename_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 576.505861] env[68217]: DEBUG nova.compute.manager [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 576.616936] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3755ad12-38bc-4ee2-ba97-862d1874f9a7 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "11f9c054-62b9-4ac9-9651-5c85e7a86663" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.034s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 576.618581] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "11f9c054-62b9-4ac9-9651-5c85e7a86663" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 13.046s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 576.618658] env[68217]: INFO nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 576.619092] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "11f9c054-62b9-4ac9-9651-5c85e7a86663" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 576.644383] env[68217]: DEBUG nova.network.neutron [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 576.756444] env[68217]: DEBUG nova.compute.manager [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 576.777293] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 576.777293] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 576.780489] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 576.780489] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 576.780489] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 576.780489] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e35d60af-4f3a-417d-8053-78a47dc17252 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.792469] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 576.792469] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 576.792989] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3ed1fe9-559e-4290-9f95-a9af1022cc00 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.805975] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8e15db30-e71b-47af-9423-c0d71a86cb3b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.817099] env[68217]: DEBUG oslo_vmware.api [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': task-2960556, 'name': PowerOnVM_Task, 'duration_secs': 1.225133} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.818842] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 576.818842] env[68217]: INFO nova.compute.manager [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Took 11.35 seconds to spawn the instance on the hypervisor. [ 576.818842] env[68217]: DEBUG nova.compute.manager [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 576.819141] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 576.819141] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52255bbf-b483-50c7-0b6f-0d37c712334e" [ 576.819141] env[68217]: _type = "Task" [ 576.819141] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 576.819825] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7ed8cc-65d6-4d1b-b5af-e45136344ade {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.837983] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Preparing fetch location {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 576.838245] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Creating directory with path [datastore2] vmware_temp/5f07ed26-5d47-4eac-9a14-776bd19b6345/575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 576.838469] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8ba3ad2-cc0b-4fc4-9cab-5f1783758d9a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.863545] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Created directory with path [datastore2] vmware_temp/5f07ed26-5d47-4eac-9a14-776bd19b6345/575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 576.863779] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Fetch image to [datastore2] vmware_temp/5f07ed26-5d47-4eac-9a14-776bd19b6345/575ba628-84b6-4b0c-98ba-305166627d10/tmp-sparse.vmdk {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 576.864486] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Downloading image file data 575ba628-84b6-4b0c-98ba-305166627d10 to [datastore2] vmware_temp/5f07ed26-5d47-4eac-9a14-776bd19b6345/575ba628-84b6-4b0c-98ba-305166627d10/tmp-sparse.vmdk on the data store datastore2 {{(pid=68217) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 576.865511] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2336e053-c920-4813-bb38-5c151759368a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.880678] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bfe829a-1899-457f-8ae5-eafac647c48c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.893149] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-31afa6e7-1a5d-4bc0-b14a-dc34066a168d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.946531] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-707ae7ed-69f6-4d11-9501-9cc93ef7fd3e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.949822] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0f225e-0782-4797-9cd5-b06d648518e9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.958874] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960561, 'name': Rename_Task, 'duration_secs': 0.189069} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 576.962411] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 576.963933] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a6a5187d-c065-467d-a8c7-3c815fdae14c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.965087] env[68217]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ef7f9eff-74a4-4298-b298-9521d82dd12f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 576.967449] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ff75267-8f4c-409d-8131-3438458e3c69 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.009851] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d26fe157-da68-4ef7-a7da-2bf804da9ff9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.012951] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for the task: (returnval){ [ 577.012951] env[68217]: value = "task-2960562" [ 577.012951] env[68217]: _type = "Task" [ 577.012951] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 577.024464] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a133fc3e-3173-46a2-b169-916c17df3bd3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.032259] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960562, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.042390] env[68217]: DEBUG nova.compute.provider_tree [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 577.070351] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Downloading image file data 575ba628-84b6-4b0c-98ba-305166627d10 to the data store datastore2 {{(pid=68217) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 577.120054] env[68217]: DEBUG nova.compute.manager [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 577.145861] env[68217]: DEBUG oslo_vmware.rw_handles [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5f07ed26-5d47-4eac-9a14-776bd19b6345/575ba628-84b6-4b0c-98ba-305166627d10/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68217) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 577.296115] env[68217]: DEBUG oslo_concurrency.lockutils [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 577.359540] env[68217]: INFO nova.compute.manager [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Took 20.28 seconds to build instance. [ 577.524702] env[68217]: DEBUG nova.compute.manager [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 577.528017] env[68217]: DEBUG nova.network.neutron [req-a9eaceaf-aa4d-4516-8a98-37f4059f7d0f req-bee83ff4-2068-414d-af0e-28bb3147a010 service nova] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Updated VIF entry in instance network info cache for port 852ba444-6eea-4b2f-bbd8-58cdde27ee66. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 577.528691] env[68217]: DEBUG nova.network.neutron [req-a9eaceaf-aa4d-4516-8a98-37f4059f7d0f req-bee83ff4-2068-414d-af0e-28bb3147a010 service nova] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Updating instance_info_cache with network_info: [{"id": "852ba444-6eea-4b2f-bbd8-58cdde27ee66", "address": "fa:16:3e:eb:f4:03", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.149", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap852ba444-6e", "ovs_interfaceid": "852ba444-6eea-4b2f-bbd8-58cdde27ee66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.542019] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960562, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 577.544976] env[68217]: DEBUG nova.scheduler.client.report [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 577.575076] env[68217]: DEBUG nova.virt.hardware [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 577.575379] env[68217]: DEBUG nova.virt.hardware [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 577.576094] env[68217]: DEBUG nova.virt.hardware [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 577.576094] env[68217]: DEBUG nova.virt.hardware [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 577.576094] env[68217]: DEBUG nova.virt.hardware [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 577.576094] env[68217]: DEBUG nova.virt.hardware [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 577.577802] env[68217]: DEBUG nova.virt.hardware [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 577.579415] env[68217]: DEBUG nova.virt.hardware [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 577.580311] env[68217]: DEBUG nova.virt.hardware [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 577.580311] env[68217]: DEBUG nova.virt.hardware [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 577.580311] env[68217]: DEBUG nova.virt.hardware [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 577.586299] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91837f0-6615-463e-98d6-fabab837432c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.602541] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c33fe52-d05e-413a-8f38-5280d43ae4df {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.607731] env[68217]: DEBUG nova.network.neutron [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Updating instance_info_cache with network_info: [{"id": "3f82f80c-ad2c-4e9a-a247-99c0d137f553", "address": "fa:16:3e:38:7f:78", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": 
{"0": "nsxv3"}}, "devname": "tap3f82f80c-ad", "ovs_interfaceid": "3f82f80c-ad2c-4e9a-a247-99c0d137f553", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 577.651726] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 577.720819] env[68217]: DEBUG nova.network.neutron [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Successfully updated port: be83d87a-5096-4216-a402-35655f496cc0 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 577.733261] env[68217]: DEBUG nova.compute.manager [None req-b4f8ab4f-ddaa-46a0-bf8c-edbbcf3e6da1 tempest-ServerDiagnosticsTest-773139321 tempest-ServerDiagnosticsTest-773139321-project-admin] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 577.735176] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ee9930-85d2-442b-bc48-de6a68e5dcd2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.745261] env[68217]: INFO nova.compute.manager [None req-b4f8ab4f-ddaa-46a0-bf8c-edbbcf3e6da1 tempest-ServerDiagnosticsTest-773139321 tempest-ServerDiagnosticsTest-773139321-project-admin] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Retrieving diagnostics [ 577.745261] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b568ee0c-d6c4-4883-bbca-a30a807e40bb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.863831] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef8975de-fc6c-4383-b8d6-ef812f2d53fe tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Lock "71dd4921-5859-421f-9e31-e9800adc9e3c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.791s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 577.863831] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "71dd4921-5859-421f-9e31-e9800adc9e3c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 14.291s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 577.869450] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29f4a64-83d6-4545-b7fa-6cc90cf6e1e8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.958566] env[68217]: DEBUG oslo_vmware.rw_handles [None 
req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Completed reading data from the image iterator. {{(pid=68217) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 577.958719] env[68217]: DEBUG oslo_vmware.rw_handles [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5f07ed26-5d47-4eac-9a14-776bd19b6345/575ba628-84b6-4b0c-98ba-305166627d10/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 578.029298] env[68217]: DEBUG oslo_vmware.api [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960562, 'name': PowerOnVM_Task, 'duration_secs': 0.584205} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.029557] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 578.029630] env[68217]: INFO nova.compute.manager [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Took 10.23 seconds to spawn the instance on the hypervisor. 
[ 578.029882] env[68217]: DEBUG nova.compute.manager [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 578.030726] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e15288-2313-415f-97b2-0f8b76caf2ac {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.038680] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Downloaded image file data 575ba628-84b6-4b0c-98ba-305166627d10 to vmware_temp/5f07ed26-5d47-4eac-9a14-776bd19b6345/575ba628-84b6-4b0c-98ba-305166627d10/tmp-sparse.vmdk on the data store datastore2 {{(pid=68217) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 578.041629] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Caching image {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 578.041629] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Copying Virtual Disk [datastore2] vmware_temp/5f07ed26-5d47-4eac-9a14-776bd19b6345/575ba628-84b6-4b0c-98ba-305166627d10/tmp-sparse.vmdk to [datastore2] vmware_temp/5f07ed26-5d47-4eac-9a14-776bd19b6345/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 578.041801] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-28574b53-626f-488b-a488-9a552bc947a3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.046199] env[68217]: DEBUG oslo_concurrency.lockutils [req-a9eaceaf-aa4d-4516-8a98-37f4059f7d0f req-bee83ff4-2068-414d-af0e-28bb3147a010 service nova] Releasing lock "refresh_cache-4f4dc254-8e4f-4c5f-a2a8-eef6230825c6" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 578.053672] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 578.053672] env[68217]: value = "task-2960563" [ 578.053672] env[68217]: _type = "Task" [ 578.053672] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.057709] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.570s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 578.058285] env[68217]: DEBUG nova.compute.manager [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 578.066431] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 13.194s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 578.066431] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.004s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 578.066431] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68217) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 578.068541] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.323s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 578.071156] env[68217]: INFO nova.compute.claims [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 578.081294] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e518d3e2-0e6c-4cd4-b58c-7490a9538c6c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.087433] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960563, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.096356] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e85b4fb-3b89-4848-93eb-9f51b398a0e6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.121416] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Releasing lock "refresh_cache-7056fb29-2a2f-4275-a411-4d5f3fcb421f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 578.123434] env[68217]: DEBUG nova.compute.manager [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Instance network_info: |[{"id": "3f82f80c-ad2c-4e9a-a247-99c0d137f553", "address": "fa:16:3e:38:7f:78", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f82f80c-ad", "ovs_interfaceid": "3f82f80c-ad2c-4e9a-a247-99c0d137f553", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 578.124859] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:38:7f:78', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f82f80c-ad2c-4e9a-a247-99c0d137f553', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 578.132788] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Creating folder: Project (29ed3216c24841d0859cdc543f5b12bf). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 578.133650] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85848c0a-b8ea-4390-adec-d6cbd5c3527b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.137269] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f5531dee-604a-4d17-9856-a10e5de98f02 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.145775] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ef2f13-70ae-4ca1-aa58-27f51874a7c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.151968] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Created folder: Project (29ed3216c24841d0859cdc543f5b12bf) in parent group-v594094. [ 578.151968] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Creating folder: Instances. Parent ref: group-v594110. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 578.151968] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60ef2af3-083b-4c29-84c2-1280ff090a26 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.184873] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181100MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=68217) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 578.184873] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 578.184999] env[68217]: DEBUG nova.network.neutron [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Successfully created port: 8a6a7f38-0ada-4d40-9405-e15fe9874407 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 578.198342] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Created folder: Instances in parent group-v594110. [ 578.198611] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 578.198854] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 578.199115] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ebdf967-4e2e-4af4-8819-c9ade7d4e188 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.219520] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 578.219520] env[68217]: value = "task-2960566" [ 578.219520] env[68217]: _type = "Task" [ 578.219520] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.224595] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "refresh_cache-9ac81867-311c-42f3-b38f-67dc10f409c0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.224805] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquired lock "refresh_cache-9ac81867-311c-42f3-b38f-67dc10f409c0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 578.224934] env[68217]: DEBUG nova.network.neutron [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 578.229664] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960566, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.368859] env[68217]: DEBUG nova.compute.manager [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 578.387305] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "71dd4921-5859-421f-9e31-e9800adc9e3c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.524s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 578.572366] env[68217]: INFO nova.compute.manager [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Took 21.14 seconds to build instance. 
[ 578.586648] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960563, 'name': CopyVirtualDisk_Task} progress is 27%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 578.591312] env[68217]: DEBUG nova.compute.utils [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 578.596351] env[68217]: DEBUG nova.compute.manager [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 578.596529] env[68217]: DEBUG nova.network.neutron [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 578.702709] env[68217]: DEBUG nova.policy [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1cbcf73e36ca4583b53b9c0c2cfd0e3f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cff35c33460c4a50ae6bee636d950504', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 578.738050] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960566, 'name': CreateVM_Task, 'duration_secs': 0.455089} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 578.738050] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 578.739883] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.739883] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 578.739883] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 578.739883] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c82b0d3c-3541-4a80-b61a-7e49905d6474 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 578.750888] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Waiting for the task: (returnval){ [ 578.750888] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d9e750-3d9b-0759-af7c-62ec1b5b0af3" [ 578.750888] env[68217]: _type = "Task" [ 578.750888] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 578.772061] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 578.772309] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 578.772513] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 578.813795] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquiring lock "83d32dd6-2629-4451-a746-bf5270083e2a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 578.814056] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "83d32dd6-2629-4451-a746-bf5270083e2a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 578.849552] env[68217]: DEBUG nova.network.neutron [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 578.903716] env[68217]: DEBUG oslo_concurrency.lockutils [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 579.073982] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960563, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.087300] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24ed11dc-55d7-4367-a98c-9a8eba248ffa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Lock "471e8a27-ed87-461a-b817-cd5ad208dd10" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.663s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 579.100353] env[68217]: DEBUG nova.compute.manager [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 579.139234] env[68217]: DEBUG nova.compute.manager [req-1733e6c6-9079-456c-bf01-99e0b3936861 req-1808a634-8894-4d79-9229-ee9e577a3419 service nova] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Received event network-vif-plugged-be83d87a-5096-4216-a402-35655f496cc0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 579.139234] env[68217]: DEBUG oslo_concurrency.lockutils [req-1733e6c6-9079-456c-bf01-99e0b3936861 req-1808a634-8894-4d79-9229-ee9e577a3419 service nova] Acquiring lock "9ac81867-311c-42f3-b38f-67dc10f409c0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 579.139234] env[68217]: DEBUG oslo_concurrency.lockutils [req-1733e6c6-9079-456c-bf01-99e0b3936861 req-1808a634-8894-4d79-9229-ee9e577a3419 service nova] Lock "9ac81867-311c-42f3-b38f-67dc10f409c0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 579.139234] env[68217]: DEBUG oslo_concurrency.lockutils [req-1733e6c6-9079-456c-bf01-99e0b3936861 req-1808a634-8894-4d79-9229-ee9e577a3419 service nova] Lock "9ac81867-311c-42f3-b38f-67dc10f409c0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 579.139234] env[68217]: DEBUG nova.compute.manager [req-1733e6c6-9079-456c-bf01-99e0b3936861 req-1808a634-8894-4d79-9229-ee9e577a3419 service nova] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] No waiting events found dispatching network-vif-plugged-be83d87a-5096-4216-a402-35655f496cc0 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 579.139521] env[68217]: WARNING nova.compute.manager [req-1733e6c6-9079-456c-bf01-99e0b3936861 req-1808a634-8894-4d79-9229-ee9e577a3419 service nova] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Received unexpected event network-vif-plugged-be83d87a-5096-4216-a402-35655f496cc0 for instance with vm_state building and task_state spawning. 
[ 579.250092] env[68217]: DEBUG nova.network.neutron [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Successfully created port: 67797938-23e4-4820-a467-727dfd4fca29 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 579.303209] env[68217]: DEBUG nova.network.neutron [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Updating instance_info_cache with network_info: [{"id": "be83d87a-5096-4216-a402-35655f496cc0", "address": "fa:16:3e:cd:b2:77", "network": {"id": "3f8eb120-19fa-420f-a14b-3cf960a6fe58", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1021764887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cff35c33460c4a50ae6bee636d950504", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe83d87a-50", "ovs_interfaceid": "be83d87a-5096-4216-a402-35655f496cc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 579.517520] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ba05c2c-72e4-4c6d-bdd4-cdf1c1f8563e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.526800] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cace23d-e1b5-447f-bda2-1dac290a40f7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.576882] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-251f2d3d-794d-4ce6-98a8-eec7f8dd1ae3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.583028] env[68217]: DEBUG nova.compute.manager [req-64fde3dc-ad4d-4bc5-ad11-08cc14536b21 req-de555570-1b75-4025-a946-71262b2a8d60 service nova] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Received event network-vif-plugged-3f82f80c-ad2c-4e9a-a247-99c0d137f553 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 579.583028] env[68217]: DEBUG oslo_concurrency.lockutils [req-64fde3dc-ad4d-4bc5-ad11-08cc14536b21 req-de555570-1b75-4025-a946-71262b2a8d60 service nova] Acquiring lock "7056fb29-2a2f-4275-a411-4d5f3fcb421f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
579.583199] env[68217]: DEBUG oslo_concurrency.lockutils [req-64fde3dc-ad4d-4bc5-ad11-08cc14536b21 req-de555570-1b75-4025-a946-71262b2a8d60 service nova] Lock "7056fb29-2a2f-4275-a411-4d5f3fcb421f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 579.583352] env[68217]: DEBUG oslo_concurrency.lockutils [req-64fde3dc-ad4d-4bc5-ad11-08cc14536b21 req-de555570-1b75-4025-a946-71262b2a8d60 service nova] Lock "7056fb29-2a2f-4275-a411-4d5f3fcb421f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 579.583500] env[68217]: DEBUG nova.compute.manager [req-64fde3dc-ad4d-4bc5-ad11-08cc14536b21 req-de555570-1b75-4025-a946-71262b2a8d60 service nova] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] No waiting events found dispatching network-vif-plugged-3f82f80c-ad2c-4e9a-a247-99c0d137f553 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 579.583648] env[68217]: WARNING nova.compute.manager [req-64fde3dc-ad4d-4bc5-ad11-08cc14536b21 req-de555570-1b75-4025-a946-71262b2a8d60 service nova] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Received unexpected event network-vif-plugged-3f82f80c-ad2c-4e9a-a247-99c0d137f553 for instance with vm_state building and task_state spawning. [ 579.583802] env[68217]: DEBUG nova.compute.manager [req-64fde3dc-ad4d-4bc5-ad11-08cc14536b21 req-de555570-1b75-4025-a946-71262b2a8d60 service nova] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Received event network-changed-3f82f80c-ad2c-4e9a-a247-99c0d137f553 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 579.585357] env[68217]: DEBUG nova.compute.manager [req-64fde3dc-ad4d-4bc5-ad11-08cc14536b21 req-de555570-1b75-4025-a946-71262b2a8d60 service nova] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Refreshing instance network info cache due to event network-changed-3f82f80c-ad2c-4e9a-a247-99c0d137f553. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 579.586979] env[68217]: DEBUG oslo_concurrency.lockutils [req-64fde3dc-ad4d-4bc5-ad11-08cc14536b21 req-de555570-1b75-4025-a946-71262b2a8d60 service nova] Acquiring lock "refresh_cache-7056fb29-2a2f-4275-a411-4d5f3fcb421f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 579.586979] env[68217]: DEBUG oslo_concurrency.lockutils [req-64fde3dc-ad4d-4bc5-ad11-08cc14536b21 req-de555570-1b75-4025-a946-71262b2a8d60 service nova] Acquired lock "refresh_cache-7056fb29-2a2f-4275-a411-4d5f3fcb421f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 579.586979] env[68217]: DEBUG nova.network.neutron [req-64fde3dc-ad4d-4bc5-ad11-08cc14536b21 req-de555570-1b75-4025-a946-71262b2a8d60 service nova] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Refreshing network info cache for port 3f82f80c-ad2c-4e9a-a247-99c0d137f553 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 579.589673] env[68217]: DEBUG nova.compute.manager [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 579.599162] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fa08b44-cde3-4e6b-bacb-94e764497469 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.605679] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960563, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.05581} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 579.605679] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Copied Virtual Disk [datastore2] vmware_temp/5f07ed26-5d47-4eac-9a14-776bd19b6345/575ba628-84b6-4b0c-98ba-305166627d10/tmp-sparse.vmdk to [datastore2] vmware_temp/5f07ed26-5d47-4eac-9a14-776bd19b6345/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 579.605679] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Deleting the datastore file [datastore2] vmware_temp/5f07ed26-5d47-4eac-9a14-776bd19b6345/575ba628-84b6-4b0c-98ba-305166627d10/tmp-sparse.vmdk {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 579.605679] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dabcd17f-e5da-4caa-a8ad-4564cdc4d318 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.619988] env[68217]: DEBUG nova.compute.provider_tree [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 579.626610] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 579.626610] env[68217]: value = "task-2960567" [ 579.626610] env[68217]: _type = "Task" [ 579.626610] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.636999] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960567, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 579.808797] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Releasing lock "refresh_cache-9ac81867-311c-42f3-b38f-67dc10f409c0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 579.809189] env[68217]: DEBUG nova.compute.manager [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Instance network_info: |[{"id": "be83d87a-5096-4216-a402-35655f496cc0", "address": "fa:16:3e:cd:b2:77", "network": {"id": "3f8eb120-19fa-420f-a14b-3cf960a6fe58", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1021764887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cff35c33460c4a50ae6bee636d950504", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe83d87a-50", "ovs_interfaceid": "be83d87a-5096-4216-a402-35655f496cc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 579.809599] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:b2:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e885ebd4-93ca-4e9e-8889-0f16bd91e61e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'be83d87a-5096-4216-a402-35655f496cc0', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 579.819686] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 579.820113] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 579.824361] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b9f6f86b-dae2-4ed9-bfa1-c22442648589 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.845628] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 579.845628] env[68217]: value = "task-2960568" [ 579.845628] env[68217]: _type = "Task" [ 579.845628] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 579.858701] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960568, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.125285] env[68217]: DEBUG nova.compute.manager [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 580.128489] env[68217]: DEBUG nova.scheduler.client.report [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 580.139150] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 580.158177] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960567, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.046446} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.160829] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 580.161017] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Moving file from [datastore2] vmware_temp/5f07ed26-5d47-4eac-9a14-776bd19b6345/575ba628-84b6-4b0c-98ba-305166627d10 to [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10. {{(pid=68217) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 580.161799] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-ab4d991e-eab0-479a-8455-00925b70ea6f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.169187] env[68217]: DEBUG nova.virt.hardware [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 580.169878] env[68217]: DEBUG nova.virt.hardware [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 580.169878] env[68217]: DEBUG nova.virt.hardware [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 580.169878] env[68217]: DEBUG nova.virt.hardware [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 580.169878] env[68217]: DEBUG nova.virt.hardware [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 580.170165] env[68217]: DEBUG nova.virt.hardware 
[None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 580.170215] env[68217]: DEBUG nova.virt.hardware [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 580.170389] env[68217]: DEBUG nova.virt.hardware [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 580.170581] env[68217]: DEBUG nova.virt.hardware [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 580.170735] env[68217]: DEBUG nova.virt.hardware [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 580.170897] env[68217]: DEBUG nova.virt.hardware [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 580.172603] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feec5542-c561-4521-87a6-7c5863fba368 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.176376] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 580.176376] env[68217]: value = "task-2960569" [ 580.176376] env[68217]: _type = "Task" [ 580.176376] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.186726] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1925efbf-017e-4c30-8dbc-54d3b290f5c7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.194575] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960569, 'name': MoveDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.358174] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960568, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.621603] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Acquiring lock "cdc84742-e20a-4e48-bfff-b3ac34405c1d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 580.623330] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Lock "cdc84742-e20a-4e48-bfff-b3ac34405c1d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 580.623330] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Acquiring lock "cdc84742-e20a-4e48-bfff-b3ac34405c1d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 580.623330] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Lock "cdc84742-e20a-4e48-bfff-b3ac34405c1d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 580.623619] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Lock "cdc84742-e20a-4e48-bfff-b3ac34405c1d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 580.626722] env[68217]: INFO nova.compute.manager [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Terminating instance [ 580.640763] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 580.641355] env[68217]: DEBUG nova.compute.manager [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Start 
building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 580.646062] env[68217]: DEBUG oslo_concurrency.lockutils [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.349s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 580.647223] env[68217]: INFO nova.compute.claims [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 580.698851] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960569, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.038172} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.699197] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] File moved {{(pid=68217) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 580.699348] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Cleaning up location [datastore2] vmware_temp/5f07ed26-5d47-4eac-9a14-776bd19b6345 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 580.699602] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Deleting the datastore file [datastore2] vmware_temp/5f07ed26-5d47-4eac-9a14-776bd19b6345 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 580.699768] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7280d7e0-a1bb-4e7f-bfae-d978c7cc6172 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.709390] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 580.709390] env[68217]: value = "task-2960570" [ 580.709390] env[68217]: _type = "Task" [ 580.709390] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.724805] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960570, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.857711] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960568, 'name': CreateVM_Task, 'duration_secs': 0.640095} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 580.857994] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 580.861073] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 580.861073] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 580.861073] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 580.861073] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe924681-de61-40a0-b219-421bf8560302 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 580.867788] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 580.867788] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a40dfc-2a47-fa0c-e458-2aac20e3ab34" [ 580.867788] env[68217]: _type = "Task" [ 580.867788] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 580.876689] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a40dfc-2a47-fa0c-e458-2aac20e3ab34, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 580.889218] env[68217]: DEBUG nova.network.neutron [req-64fde3dc-ad4d-4bc5-ad11-08cc14536b21 req-de555570-1b75-4025-a946-71262b2a8d60 service nova] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Updated VIF entry in instance network info cache for port 3f82f80c-ad2c-4e9a-a247-99c0d137f553. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 580.889580] env[68217]: DEBUG nova.network.neutron [req-64fde3dc-ad4d-4bc5-ad11-08cc14536b21 req-de555570-1b75-4025-a946-71262b2a8d60 service nova] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Updating instance_info_cache with network_info: [{"id": "3f82f80c-ad2c-4e9a-a247-99c0d137f553", "address": "fa:16:3e:38:7f:78", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.56", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f82f80c-ad", "ovs_interfaceid": "3f82f80c-ad2c-4e9a-a247-99c0d137f553", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 581.058869] env[68217]: DEBUG nova.network.neutron [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Successfully updated port: 8a6a7f38-0ada-4d40-9405-e15fe9874407 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 581.083574] env[68217]: DEBUG oslo_concurrency.lockutils [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Acquiring lock "471e8a27-ed87-461a-b817-cd5ad208dd10" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 581.084331] env[68217]: DEBUG oslo_concurrency.lockutils [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Lock "471e8a27-ed87-461a-b817-cd5ad208dd10" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 581.084331] env[68217]: DEBUG oslo_concurrency.lockutils [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Acquiring lock "471e8a27-ed87-461a-b817-cd5ad208dd10-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 581.086515] env[68217]: DEBUG oslo_concurrency.lockutils [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 
tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Lock "471e8a27-ed87-461a-b817-cd5ad208dd10-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 581.086515] env[68217]: DEBUG oslo_concurrency.lockutils [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Lock "471e8a27-ed87-461a-b817-cd5ad208dd10-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 581.088152] env[68217]: INFO nova.compute.manager [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Terminating instance [ 581.136714] env[68217]: DEBUG nova.compute.manager [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 581.137125] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 581.138642] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa44d306-14f2-4853-90b6-7a4df667b4b0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.147915] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 581.148381] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-597094f8-e230-42bc-b1de-e70a459b6e0b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.152927] env[68217]: DEBUG nova.compute.utils [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 581.156375] env[68217]: DEBUG nova.compute.manager [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Not allocating networking since 'none' was specified. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 581.160026] env[68217]: DEBUG oslo_vmware.api [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Waiting for the task: (returnval){ [ 581.160026] env[68217]: value = "task-2960571" [ 581.160026] env[68217]: _type = "Task" [ 581.160026] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.173854] env[68217]: DEBUG oslo_vmware.api [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Task: {'id': task-2960571, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.224062] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960570, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.085836} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.224490] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 581.225327] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dac9ad48-66f5-4bfb-91ab-615ef48df14a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.232178] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 581.232178] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527c99de-3cfc-8f99-14b7-8c47e6cb8846" [ 581.232178] env[68217]: _type = "Task" [ 581.232178] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.242666] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527c99de-3cfc-8f99-14b7-8c47e6cb8846, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.353406] env[68217]: DEBUG nova.network.neutron [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Successfully updated port: 67797938-23e4-4820-a467-727dfd4fca29 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 581.381600] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a40dfc-2a47-fa0c-e458-2aac20e3ab34, 'name': SearchDatastore_Task, 'duration_secs': 0.024015} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.382143] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 581.382684] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 581.383028] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.394107] env[68217]: DEBUG oslo_concurrency.lockutils [req-64fde3dc-ad4d-4bc5-ad11-08cc14536b21 req-de555570-1b75-4025-a946-71262b2a8d60 service nova] Releasing lock "refresh_cache-7056fb29-2a2f-4275-a411-4d5f3fcb421f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 581.561405] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Acquiring lock "refresh_cache-b0b21c65-ef3d-4492-a6b2-d2321a3dacde" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.563507] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Acquired lock "refresh_cache-b0b21c65-ef3d-4492-a6b2-d2321a3dacde" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 581.563507] env[68217]: DEBUG nova.network.neutron [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: 
b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 581.594787] env[68217]: DEBUG nova.compute.manager [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 581.594787] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 581.594787] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b4104c-353b-4e72-bb7c-ff3744c5b96f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.606419] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 581.606727] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-89538f8b-9ab7-4f11-aee5-8c3e1c82588b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.616469] env[68217]: DEBUG oslo_vmware.api [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Waiting for the task: (returnval){ [ 581.616469] env[68217]: value = "task-2960572" [ 581.616469] env[68217]: _type = "Task" [ 581.616469] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.631817] env[68217]: DEBUG oslo_vmware.api [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Task: {'id': task-2960572, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.658539] env[68217]: DEBUG nova.compute.manager [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 581.681574] env[68217]: DEBUG oslo_vmware.api [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Task: {'id': task-2960571, 'name': PowerOffVM_Task, 'duration_secs': 0.211043} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.681574] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 581.681574] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 581.681574] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ed97614-17e0-4594-8f71-cb9ae4fd36cf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.748587] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527c99de-3cfc-8f99-14b7-8c47e6cb8846, 'name': SearchDatastore_Task, 'duration_secs': 0.019658} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 581.753264] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 581.753264] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6/4f4dc254-8e4f-4c5f-a2a8-eef6230825c6.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 581.753264] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 581.753264] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 581.753497] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-365060ad-82b9-4927-b3c9-602c5856a7f7 {{(pid=68217) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.757038] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7741253c-1d22-4136-a9b7-e61e7093bdab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.759680] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 581.760316] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 581.760387] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Deleting the datastore file [datastore1] cdc84742-e20a-4e48-bfff-b3ac34405c1d {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 581.761505] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-57f65812-f794-4a32-8b42-37d79e5bb048 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.769743] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 581.769743] env[68217]: value = "task-2960574" [ 581.769743] env[68217]: _type = "Task" [ 581.769743] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.773261] env[68217]: DEBUG oslo_vmware.api [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Waiting for the task: (returnval){ [ 581.773261] env[68217]: value = "task-2960575" [ 581.773261] env[68217]: _type = "Task" [ 581.773261] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.782645] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 581.782645] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 581.783729] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c78a569a-1698-48c5-88ef-1e245d5df5b9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.797974] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960574, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.805771] env[68217]: DEBUG oslo_vmware.api [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Task: {'id': task-2960575, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.807715] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Waiting for the task: (returnval){ [ 581.807715] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e5d164-e97a-f95d-732d-ec9a990a44ec" [ 581.807715] env[68217]: _type = "Task" [ 581.807715] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 581.819930] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e5d164-e97a-f95d-732d-ec9a990a44ec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 581.857253] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "refresh_cache-14c8e8e6-5d7f-45b4-8a84-d5951c38573f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 581.857253] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquired lock "refresh_cache-14c8e8e6-5d7f-45b4-8a84-d5951c38573f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 581.857253] env[68217]: DEBUG nova.network.neutron [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 582.036985] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06909a1d-ff2d-40da-955b-2fdba4b6c948 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.048967] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ed5eb5-a0de-4df7-846b-48a534ec9e05 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.084261] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0a94bb5-a435-406a-9ae5-f40c6b337edd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.093542] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf2abb05-5a45-4ee8-9b57-89e7eefbcd36 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.109589] env[68217]: DEBUG nova.compute.provider_tree [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 582.129352] env[68217]: DEBUG oslo_vmware.api [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Task: {'id': task-2960572, 'name': PowerOffVM_Task, 'duration_secs': 0.263909} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.129352] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 582.129352] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 582.129352] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c16191f0-c01f-4e2b-baa8-db56ca1028ca {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.187889] env[68217]: DEBUG nova.network.neutron [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 582.204350] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 582.204569] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 582.204790] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Deleting the datastore file [datastore1] 471e8a27-ed87-461a-b817-cd5ad208dd10 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 582.205094] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39992d38-a4f3-4f52-b50e-234259cb5457 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.216976] env[68217]: DEBUG oslo_vmware.api [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Waiting for the task: (returnval){ [ 582.216976] env[68217]: value = "task-2960577" [ 582.216976] env[68217]: _type = "Task" [ 582.216976] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.232652] env[68217]: DEBUG oslo_vmware.api [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Task: {'id': task-2960577, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.284943] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960574, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.300167] env[68217]: DEBUG oslo_vmware.api [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Task: {'id': task-2960575, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.349307} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.300167] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 582.300167] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 582.300167] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 582.300591] env[68217]: INFO nova.compute.manager [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 582.300902] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 582.302108] env[68217]: DEBUG nova.compute.manager [-] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 582.302244] env[68217]: DEBUG nova.network.neutron [-] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 582.323033] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e5d164-e97a-f95d-732d-ec9a990a44ec, 'name': SearchDatastore_Task, 'duration_secs': 0.021742} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.323864] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47708204-c43a-4bb6-82c8-01c431fea040 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.335916] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquiring lock "dfeeed37-8c84-4ecc-87ea-f4239f512fb1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 582.336204] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "dfeeed37-8c84-4ecc-87ea-f4239f512fb1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 582.336480] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Waiting for the task: (returnval){ [ 582.336480] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52350761-62eb-e9be-cb26-da45118059c5" [ 582.336480] env[68217]: _type = "Task" [ 582.336480] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.351484] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52350761-62eb-e9be-cb26-da45118059c5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.540305] env[68217]: DEBUG nova.network.neutron [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 582.610981] env[68217]: DEBUG nova.network.neutron [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Updating instance_info_cache with network_info: [{"id": "8a6a7f38-0ada-4d40-9405-e15fe9874407", "address": "fa:16:3e:e3:44:28", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.174", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a6a7f38-0a", "ovs_interfaceid": "8a6a7f38-0ada-4d40-9405-e15fe9874407", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 582.612780] env[68217]: DEBUG nova.scheduler.client.report [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 582.676049] env[68217]: DEBUG nova.compute.manager [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 582.711094] env[68217]: DEBUG nova.virt.hardware [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 582.711346] env[68217]: DEBUG nova.virt.hardware [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 582.711535] env[68217]: DEBUG nova.virt.hardware [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 582.711720] env[68217]: DEBUG nova.virt.hardware [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 582.711924] env[68217]: DEBUG nova.virt.hardware [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 582.712037] env[68217]: DEBUG nova.virt.hardware [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 582.712585] env[68217]: DEBUG nova.virt.hardware [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 582.712766] env[68217]: DEBUG nova.virt.hardware [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 582.712942] env[68217]: DEBUG nova.virt.hardware [None req-db956c30-1603-4496-8024-3238ddf9ca18 
tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 582.713163] env[68217]: DEBUG nova.virt.hardware [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 582.713327] env[68217]: DEBUG nova.virt.hardware [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 582.714234] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cd6d437-04c9-4ce2-b9f5-288b137b6876 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.732406] env[68217]: DEBUG oslo_vmware.api [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Task: {'id': task-2960577, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.290317} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.734461] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 582.734655] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 582.734825] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 582.735070] env[68217]: INFO nova.compute.manager [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Took 1.14 seconds to destroy the instance on the hypervisor. [ 582.735328] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 582.735692] env[68217]: DEBUG nova.compute.manager [-] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 582.735791] env[68217]: DEBUG nova.network.neutron [-] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 582.738666] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-002d6ba6-d74e-4807-8c3b-0fa7dcbd5b0d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.757259] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Instance VIF info [] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 582.763636] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Creating folder: Project (9948fedb1f33420f8c4d8ddd92280d67). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 582.764434] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8f656cad-b403-42c4-90f0-2917f5a06fd9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.778472] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Created folder: Project (9948fedb1f33420f8c4d8ddd92280d67) in parent group-v594094. [ 582.778694] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Creating folder: Instances. Parent ref: group-v594114. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 582.782526] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0d8f06d-4fcf-4dc7-bd81-d3664329822f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.785524] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960574, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.689799} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.786262] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6/4f4dc254-8e4f-4c5f-a2a8-eef6230825c6.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 582.786262] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 582.786699] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3800bd2a-2466-4126-a168-dfc53a011464 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.796565] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 582.796565] env[68217]: value = "task-2960580" [ 582.796565] env[68217]: _type = "Task" [ 582.796565] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.800033] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Created folder: Instances in parent group-v594114. [ 582.800033] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 582.803180] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 582.804049] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a9cbee44-6954-41dc-8ccd-0c346431c5c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.823049] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960580, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.824511] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 582.824511] env[68217]: value = "task-2960581" [ 582.824511] env[68217]: _type = "Task" [ 582.824511] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.833814] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960581, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.848569] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52350761-62eb-e9be-cb26-da45118059c5, 'name': SearchDatastore_Task, 'duration_secs': 0.056927} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 582.848907] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 582.849214] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 7056fb29-2a2f-4275-a411-4d5f3fcb421f/7056fb29-2a2f-4275-a411-4d5f3fcb421f.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 582.849500] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 582.849696] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 582.850078] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-969549d4-a093-4253-bc17-a5198780f076 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.852348] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7362032d-f28e-40c9-8492-0997bff77c46 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.862493] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Waiting for the task: (returnval){ [ 582.862493] env[68217]: value = "task-2960582" [ 582.862493] env[68217]: _type = "Task" [ 582.862493] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.873345] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 582.873345] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 582.874404] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0642c82-4b1e-464c-ab5e-ae872423e313 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.881594] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': task-2960582, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 582.885496] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 582.885496] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52498aab-f47a-9475-a33a-5b2fd77d0538" [ 582.885496] env[68217]: _type = "Task" [ 582.885496] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 582.898358] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52498aab-f47a-9475-a33a-5b2fd77d0538, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.121522] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Releasing lock "refresh_cache-b0b21c65-ef3d-4492-a6b2-d2321a3dacde" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 583.122831] env[68217]: DEBUG nova.compute.manager [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Instance network_info: |[{"id": "8a6a7f38-0ada-4d40-9405-e15fe9874407", "address": "fa:16:3e:e3:44:28", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.174", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a6a7f38-0a", "ovs_interfaceid": "8a6a7f38-0ada-4d40-9405-e15fe9874407", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 583.123960] env[68217]: DEBUG oslo_concurrency.lockutils [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.479s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 583.124722] env[68217]: DEBUG nova.compute.manager [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 583.134360] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e3:44:28', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8a6a7f38-0ada-4d40-9405-e15fe9874407', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 583.149127] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Creating folder: Project (d957b78fc55e42ca8abb7817c82d9db7). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 583.149242] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.498s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 583.151606] env[68217]: INFO nova.compute.claims [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 583.158684] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ac038826-d8b3-47eb-848a-8b783a8b16fe {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.179229] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Created folder: Project (d957b78fc55e42ca8abb7817c82d9db7) in parent group-v594094. [ 583.179948] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Creating folder: Instances. Parent ref: group-v594117. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 583.179948] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0f1858cb-e670-4ea0-bbeb-72a38ae9af6e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.196610] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Created folder: Instances in parent group-v594117. 
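
The PowerOffVM_Task, CopyVirtualDisk_Task, CreateFolder and CreateVM_Task records above all follow the same oslo.vmware call pattern: invoke an asynchronous vSphere *_Task method through the API session, then poll the returned task object until it finishes, which is what produces the "Waiting for the task", "progress is N%" and "completed successfully" lines. A minimal sketch of that pattern, assuming an already-established VMwareAPISession and a VM managed-object reference obtained elsewhere (the names below are illustrative, not Nova's own helpers):

from oslo_vmware import api


def power_off_vm(session: api.VMwareAPISession, vm_ref):
    # Invoke the asynchronous vSphere method; it returns a Task managed object.
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # wait_for_task() polls the task at the session's task_poll_interval and
    # raises on failure; its polling is what emits the progress/completion
    # entries seen in the log.
    return session.wait_for_task(task)
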
[ 583.196610] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 583.196610] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 583.196610] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c59776c9-56dd-4a99-aee8-903e47bae033 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.226843] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 583.226843] env[68217]: value = "task-2960585" [ 583.226843] env[68217]: _type = "Task" [ 583.226843] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.238530] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960585, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.311579] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960580, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.11469} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.312637] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 583.313164] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0b45217-1c1d-4e85-8d52-8be9001231ae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.341572] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Reconfiguring VM instance instance-00000005 to attach disk [datastore2] 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6/4f4dc254-8e4f-4c5f-a2a8-eef6230825c6.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 583.343611] env[68217]: DEBUG nova.network.neutron [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Updating instance_info_cache with network_info: [{"id": "67797938-23e4-4820-a467-727dfd4fca29", "address": "fa:16:3e:9e:b9:92", "network": {"id": "3f8eb120-19fa-420f-a14b-3cf960a6fe58", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1021764887-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cff35c33460c4a50ae6bee636d950504", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67797938-23", "ovs_interfaceid": "67797938-23e4-4820-a467-727dfd4fca29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.351305] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba055ae2-db18-4902-9dba-568fbd371eaa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.368284] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Releasing lock "refresh_cache-14c8e8e6-5d7f-45b4-8a84-d5951c38573f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 583.369031] env[68217]: DEBUG nova.compute.manager [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Instance network_info: |[{"id": "67797938-23e4-4820-a467-727dfd4fca29", "address": "fa:16:3e:9e:b9:92", "network": {"id": "3f8eb120-19fa-420f-a14b-3cf960a6fe58", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1021764887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cff35c33460c4a50ae6bee636d950504", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67797938-23", "ovs_interfaceid": "67797938-23e4-4820-a467-727dfd4fca29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 583.370169] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:9e:b9:92', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e885ebd4-93ca-4e9e-8889-0f16bd91e61e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67797938-23e4-4820-a467-727dfd4fca29', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 583.378325] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 583.384300] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 583.388830] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cdf137a5-b8eb-4737-95c2-bea96cef477d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.406991] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960581, 'name': CreateVM_Task, 'duration_secs': 0.539326} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.413749] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 583.414033] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 583.414033] env[68217]: value = "task-2960586" [ 583.414033] env[68217]: _type = "Task" [ 583.414033] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.415972] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.415972] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 583.416102] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 583.418896] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70e9429e-7932-4e6b-a567-8ae04d424b2f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.437124] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52498aab-f47a-9475-a33a-5b2fd77d0538, 'name': SearchDatastore_Task, 'duration_secs': 0.010656} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.438130] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 583.438130] env[68217]: value = "task-2960587" [ 583.438130] env[68217]: _type = "Task" [ 583.438130] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.438130] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': task-2960582, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.442203] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f1904c7-a84d-4f47-9346-7471b8a8c3cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.451812] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 583.451812] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]523c9402-b3c5-fa3d-33bc-250deeea4911" [ 583.451812] env[68217]: _type = "Task" [ 583.451812] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.466941] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960586, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.468195] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960587, 'name': CreateVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.468195] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 583.468195] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]520ad9b8-70b7-a12f-aa48-1885b74b1d1a" [ 583.468195] env[68217]: _type = "Task" [ 583.468195] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.479702] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523c9402-b3c5-fa3d-33bc-250deeea4911, 'name': SearchDatastore_Task, 'duration_secs': 0.015212} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.484710] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 583.485011] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 583.485278] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.485920] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]520ad9b8-70b7-a12f-aa48-1885b74b1d1a, 'name': SearchDatastore_Task, 'duration_secs': 0.014565} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.486357] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 583.486576] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 9ac81867-311c-42f3-b38f-67dc10f409c0/9ac81867-311c-42f3-b38f-67dc10f409c0.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 583.486883] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 583.487086] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 583.487313] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b387bfa-7ed2-4748-a9ee-0409d772a2e0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.489563] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-242d481b-18f8-46d4-bfb2-0235b9b9b5a2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.500748] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 583.500748] env[68217]: value = "task-2960588" [ 583.500748] env[68217]: _type = "Task" [ 583.500748] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.502428] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 583.502585] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 583.508113] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3592aff0-1180-435a-8d8a-e87cd91759ea {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.520385] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 583.520385] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]528b16b4-2b1d-4936-b10e-5b73ee123dc7" [ 583.520385] env[68217]: _type = "Task" [ 583.520385] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.520385] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960588, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.529989] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528b16b4-2b1d-4936-b10e-5b73ee123dc7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.646234] env[68217]: DEBUG nova.compute.manager [req-c481735f-d3f7-4722-bc3c-03f85de9784b req-955ae454-ebc6-4c5b-9ee5-cc839ede1895 service nova] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Received event network-changed-be83d87a-5096-4216-a402-35655f496cc0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 583.647017] env[68217]: DEBUG nova.compute.manager [req-c481735f-d3f7-4722-bc3c-03f85de9784b req-955ae454-ebc6-4c5b-9ee5-cc839ede1895 service nova] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Refreshing instance network info cache due to event network-changed-be83d87a-5096-4216-a402-35655f496cc0. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 583.647017] env[68217]: DEBUG oslo_concurrency.lockutils [req-c481735f-d3f7-4722-bc3c-03f85de9784b req-955ae454-ebc6-4c5b-9ee5-cc839ede1895 service nova] Acquiring lock "refresh_cache-9ac81867-311c-42f3-b38f-67dc10f409c0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.647017] env[68217]: DEBUG oslo_concurrency.lockutils [req-c481735f-d3f7-4722-bc3c-03f85de9784b req-955ae454-ebc6-4c5b-9ee5-cc839ede1895 service nova] Acquired lock "refresh_cache-9ac81867-311c-42f3-b38f-67dc10f409c0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 583.647988] env[68217]: DEBUG nova.network.neutron [req-c481735f-d3f7-4722-bc3c-03f85de9784b req-955ae454-ebc6-4c5b-9ee5-cc839ede1895 service nova] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Refreshing network info cache for port be83d87a-5096-4216-a402-35655f496cc0 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 583.659932] env[68217]: DEBUG nova.compute.utils [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 583.661889] env[68217]: DEBUG nova.compute.manager [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Not allocating networking since 'none' was specified. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 583.673094] env[68217]: DEBUG nova.compute.manager [req-270058c2-070b-4689-a230-373003c9506d req-5de9bbe0-9fce-4735-bffb-95854fc2ce8f service nova] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Received event network-vif-plugged-8a6a7f38-0ada-4d40-9405-e15fe9874407 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 583.674547] env[68217]: DEBUG oslo_concurrency.lockutils [req-270058c2-070b-4689-a230-373003c9506d req-5de9bbe0-9fce-4735-bffb-95854fc2ce8f service nova] Acquiring lock "b0b21c65-ef3d-4492-a6b2-d2321a3dacde-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 583.674547] env[68217]: DEBUG oslo_concurrency.lockutils [req-270058c2-070b-4689-a230-373003c9506d req-5de9bbe0-9fce-4735-bffb-95854fc2ce8f service nova] Lock "b0b21c65-ef3d-4492-a6b2-d2321a3dacde-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 583.674547] env[68217]: DEBUG oslo_concurrency.lockutils [req-270058c2-070b-4689-a230-373003c9506d req-5de9bbe0-9fce-4735-bffb-95854fc2ce8f service nova] Lock "b0b21c65-ef3d-4492-a6b2-d2321a3dacde-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 583.674547] env[68217]: DEBUG nova.compute.manager [req-270058c2-070b-4689-a230-373003c9506d req-5de9bbe0-9fce-4735-bffb-95854fc2ce8f service nova] [instance: 
b0b21c65-ef3d-4492-a6b2-d2321a3dacde] No waiting events found dispatching network-vif-plugged-8a6a7f38-0ada-4d40-9405-e15fe9874407 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 583.674876] env[68217]: WARNING nova.compute.manager [req-270058c2-070b-4689-a230-373003c9506d req-5de9bbe0-9fce-4735-bffb-95854fc2ce8f service nova] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Received unexpected event network-vif-plugged-8a6a7f38-0ada-4d40-9405-e15fe9874407 for instance with vm_state building and task_state spawning. [ 583.675131] env[68217]: DEBUG nova.compute.manager [req-270058c2-070b-4689-a230-373003c9506d req-5de9bbe0-9fce-4735-bffb-95854fc2ce8f service nova] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Received event network-changed-8a6a7f38-0ada-4d40-9405-e15fe9874407 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 583.676330] env[68217]: DEBUG nova.compute.manager [req-270058c2-070b-4689-a230-373003c9506d req-5de9bbe0-9fce-4735-bffb-95854fc2ce8f service nova] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Refreshing instance network info cache due to event network-changed-8a6a7f38-0ada-4d40-9405-e15fe9874407. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 583.676330] env[68217]: DEBUG oslo_concurrency.lockutils [req-270058c2-070b-4689-a230-373003c9506d req-5de9bbe0-9fce-4735-bffb-95854fc2ce8f service nova] Acquiring lock "refresh_cache-b0b21c65-ef3d-4492-a6b2-d2321a3dacde" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.676330] env[68217]: DEBUG oslo_concurrency.lockutils [req-270058c2-070b-4689-a230-373003c9506d req-5de9bbe0-9fce-4735-bffb-95854fc2ce8f service nova] Acquired lock "refresh_cache-b0b21c65-ef3d-4492-a6b2-d2321a3dacde" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 583.676330] env[68217]: DEBUG nova.network.neutron [req-270058c2-070b-4689-a230-373003c9506d req-5de9bbe0-9fce-4735-bffb-95854fc2ce8f service nova] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Refreshing network info cache for port 8a6a7f38-0ada-4d40-9405-e15fe9874407 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 583.744857] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960585, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.882616] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': task-2960582, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.568976} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.883085] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 7056fb29-2a2f-4275-a411-4d5f3fcb421f/7056fb29-2a2f-4275-a411-4d5f3fcb421f.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 583.883938] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 583.883938] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3988ef6d-208a-4fe8-8f27-1e5808c56867 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.894925] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Waiting for the task: (returnval){ [ 583.894925] env[68217]: value = "task-2960589" [ 583.894925] env[68217]: _type = "Task" [ 583.894925] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.907486] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': task-2960589, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.931864] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960586, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 583.960542] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960587, 'name': CreateVM_Task, 'duration_secs': 0.430101} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 583.961402] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 583.964232] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 583.964232] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 583.964232] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 583.964232] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95217953-2be5-4c35-bbb4-2faf7adfc278 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.972727] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 583.972727] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5294264b-bc17-0a12-9569-208e17e4a88d" [ 583.972727] env[68217]: _type = "Task" [ 583.972727] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 583.973146] env[68217]: DEBUG nova.network.neutron [-] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.986927] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5294264b-bc17-0a12-9569-208e17e4a88d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.021457] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960588, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.041528] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528b16b4-2b1d-4936-b10e-5b73ee123dc7, 'name': SearchDatastore_Task, 'duration_secs': 0.024545} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.042321] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2c17e4d2-dc69-4407-b88a-984f380de65f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.051135] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 584.051135] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52655de8-c252-f69e-9c32-8f2a864147b5" [ 584.051135] env[68217]: _type = "Task" [ 584.051135] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.062742] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52655de8-c252-f69e-9c32-8f2a864147b5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.104016] env[68217]: DEBUG nova.network.neutron [-] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.163242] env[68217]: DEBUG nova.compute.manager [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 584.253802] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960585, 'name': CreateVM_Task, 'duration_secs': 0.639761} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.255041] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 584.255795] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.408129] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': task-2960589, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.14142} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.408129] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 584.408129] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac403976-4dd5-4a68-84c5-0162d14c7554 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.435982] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Reconfiguring VM instance instance-00000006 to attach disk [datastore2] 7056fb29-2a2f-4275-a411-4d5f3fcb421f/7056fb29-2a2f-4275-a411-4d5f3fcb421f.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 584.442029] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9f6762e8-83a5-4934-8c28-06350c3c96d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.458496] env[68217]: DEBUG nova.network.neutron [req-c481735f-d3f7-4722-bc3c-03f85de9784b req-955ae454-ebc6-4c5b-9ee5-cc839ede1895 service nova] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Updated VIF entry in instance network info cache for port be83d87a-5096-4216-a402-35655f496cc0. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 584.458496] env[68217]: DEBUG nova.network.neutron [req-c481735f-d3f7-4722-bc3c-03f85de9784b req-955ae454-ebc6-4c5b-9ee5-cc839ede1895 service nova] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Updating instance_info_cache with network_info: [{"id": "be83d87a-5096-4216-a402-35655f496cc0", "address": "fa:16:3e:cd:b2:77", "network": {"id": "3f8eb120-19fa-420f-a14b-3cf960a6fe58", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1021764887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cff35c33460c4a50ae6bee636d950504", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe83d87a-50", "ovs_interfaceid": "be83d87a-5096-4216-a402-35655f496cc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.466874] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960586, 'name': ReconfigVM_Task, 'duration_secs': 0.848242} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.468740] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Reconfigured VM instance instance-00000005 to attach disk [datastore2] 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6/4f4dc254-8e4f-4c5f-a2a8-eef6230825c6.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 584.468740] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Waiting for the task: (returnval){ [ 584.468740] env[68217]: value = "task-2960590" [ 584.468740] env[68217]: _type = "Task" [ 584.468740] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.469030] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-42c3524f-9c4c-4de4-9955-748bf52f5485 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.480848] env[68217]: INFO nova.compute.manager [-] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Took 2.18 seconds to deallocate network for instance. 
[ 584.487821] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': task-2960590, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.488182] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 584.488182] env[68217]: value = "task-2960591" [ 584.488182] env[68217]: _type = "Task" [ 584.488182] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.498031] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5294264b-bc17-0a12-9569-208e17e4a88d, 'name': SearchDatastore_Task, 'duration_secs': 0.059473} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.502516] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 584.502751] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 584.503051] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.506112] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 584.506476] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 584.507710] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-fbe0d620-66a5-448b-9909-6e943f49fa6a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.520013] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960591, 'name': Rename_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.524066] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960588, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.752273} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.528220] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 9ac81867-311c-42f3-b38f-67dc10f409c0/9ac81867-311c-42f3-b38f-67dc10f409c0.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 584.528488] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 584.528866] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Waiting for the task: (returnval){ [ 584.528866] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c7171d-51a1-ca4b-6b8b-609cf3cace5f" [ 584.528866] env[68217]: _type = "Task" [ 584.528866] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.529281] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-07034660-5f39-403a-9ee9-fd8bf6bc3494 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.546072] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c7171d-51a1-ca4b-6b8b-609cf3cace5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.546145] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 584.546145] env[68217]: value = "task-2960592" [ 584.546145] env[68217]: _type = "Task" [ 584.546145] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.561349] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960592, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.570449] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52655de8-c252-f69e-9c32-8f2a864147b5, 'name': SearchDatastore_Task, 'duration_secs': 0.056465} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.570449] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 584.570449] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 00d2302b-84d4-42d8-94c7-caf45b925ddf/00d2302b-84d4-42d8-94c7-caf45b925ddf.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 584.570449] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 584.570650] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 584.570650] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9c97f74c-605b-42f1-a078-8418dcbaebae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.572485] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a9c989b-fb27-42c5-8edd-238c32a6b699 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.584277] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 584.584277] env[68217]: value = "task-2960593" [ 584.584277] env[68217]: _type = "Task" [ 
584.584277] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.590197] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 584.590197] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 584.591069] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20cdc679-f6bc-4630-9b05-6d85d66d19e6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.598182] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960593, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.604846] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 584.604846] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e720f9-021c-4126-1df3-6ae881400564" [ 584.604846] env[68217]: _type = "Task" [ 584.604846] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 584.608477] env[68217]: INFO nova.compute.manager [-] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Took 1.87 seconds to deallocate network for instance. [ 584.616778] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e720f9-021c-4126-1df3-6ae881400564, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 584.676117] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e884fb78-21d7-4d54-9cce-a7052c6eacf5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.686718] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-256b5b0a-9160-4778-a93f-93b951fb4461 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.733257] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cce5719-df92-43da-a98e-b4d9e96c325e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.737763] env[68217]: DEBUG nova.network.neutron [req-270058c2-070b-4689-a230-373003c9506d req-5de9bbe0-9fce-4735-bffb-95854fc2ce8f service nova] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Updated VIF entry in instance network info cache for port 8a6a7f38-0ada-4d40-9405-e15fe9874407. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 584.738272] env[68217]: DEBUG nova.network.neutron [req-270058c2-070b-4689-a230-373003c9506d req-5de9bbe0-9fce-4735-bffb-95854fc2ce8f service nova] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Updating instance_info_cache with network_info: [{"id": "8a6a7f38-0ada-4d40-9405-e15fe9874407", "address": "fa:16:3e:e3:44:28", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.174", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a6a7f38-0a", "ovs_interfaceid": "8a6a7f38-0ada-4d40-9405-e15fe9874407", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.747273] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e4a4f75-e85c-4da1-8d06-8a195c18f888 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.764971] env[68217]: DEBUG nova.compute.provider_tree [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 584.961719] env[68217]: DEBUG oslo_concurrency.lockutils [req-c481735f-d3f7-4722-bc3c-03f85de9784b 
req-955ae454-ebc6-4c5b-9ee5-cc839ede1895 service nova] Releasing lock "refresh_cache-9ac81867-311c-42f3-b38f-67dc10f409c0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 584.982053] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': task-2960590, 'name': ReconfigVM_Task, 'duration_secs': 0.35424} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 584.982395] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Reconfigured VM instance instance-00000006 to attach disk [datastore2] 7056fb29-2a2f-4275-a411-4d5f3fcb421f/7056fb29-2a2f-4275-a411-4d5f3fcb421f.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 584.983177] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-618d3082-0603-446a-9b9f-270f30ded1fb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 584.991460] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Waiting for the task: (returnval){ [ 584.991460] env[68217]: value = "task-2960594" [ 584.991460] env[68217]: _type = "Task" [ 584.991460] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.005257] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 585.005792] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': task-2960594, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.013659] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960591, 'name': Rename_Task, 'duration_secs': 0.269747} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.014122] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 585.014444] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cead53cb-53de-4d45-b8b5-552e18530273 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.027227] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 585.027227] env[68217]: value = "task-2960595" [ 585.027227] env[68217]: _type = "Task" [ 585.027227] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.041295] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960595, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.049349] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c7171d-51a1-ca4b-6b8b-609cf3cace5f, 'name': SearchDatastore_Task, 'duration_secs': 0.022381} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.053256] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 585.053663] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 585.053846] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.060594] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960592, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.129724} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.060924] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 585.061817] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88729e01-95ca-48d4-948e-05e173bc985e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.087873] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Reconfiguring VM instance instance-00000007 to attach disk [datastore2] 9ac81867-311c-42f3-b38f-67dc10f409c0/9ac81867-311c-42f3-b38f-67dc10f409c0.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 585.088446] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73ad467a-8fef-4352-a9f6-7210a2d97ba4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.116595] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 585.116595] env[68217]: value = "task-2960596" [ 585.116595] env[68217]: _type = "Task" [ 585.116595] 
env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.124325] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e720f9-021c-4126-1df3-6ae881400564, 'name': SearchDatastore_Task, 'duration_secs': 0.019088} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.125384] env[68217]: DEBUG oslo_concurrency.lockutils [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 585.125750] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960593, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.131732] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1daaaee-b5c0-4dda-897d-fef9df436eba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.145045] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 585.145045] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]526d7abd-38fc-06f0-c402-587a5e2c832c" [ 585.145045] env[68217]: _type = "Task" [ 585.145045] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.145826] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960596, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.158674] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526d7abd-38fc-06f0-c402-587a5e2c832c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.184241] env[68217]: DEBUG nova.compute.manager [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 585.219902] env[68217]: DEBUG nova.virt.hardware [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 585.220164] env[68217]: DEBUG nova.virt.hardware [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 585.220314] env[68217]: DEBUG nova.virt.hardware [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 585.220490] env[68217]: DEBUG nova.virt.hardware [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 585.220626] env[68217]: DEBUG nova.virt.hardware [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 585.220821] env[68217]: DEBUG nova.virt.hardware [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 585.221038] env[68217]: DEBUG nova.virt.hardware [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 585.221279] env[68217]: DEBUG nova.virt.hardware [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 585.221490] env[68217]: DEBUG 
nova.virt.hardware [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 585.221656] env[68217]: DEBUG nova.virt.hardware [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 585.221847] env[68217]: DEBUG nova.virt.hardware [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 585.222934] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-164d74ca-7810-4a74-9f84-254ac6b50459 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.232735] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97318f8b-b740-499d-8442-4d8545ab1ade {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.250845] env[68217]: DEBUG oslo_concurrency.lockutils [req-270058c2-070b-4689-a230-373003c9506d req-5de9bbe0-9fce-4735-bffb-95854fc2ce8f service nova] Releasing lock "refresh_cache-b0b21c65-ef3d-4492-a6b2-d2321a3dacde" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 585.251543] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Instance VIF info [] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 585.258653] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Creating folder: Project (ed6f1560fc474756af7a9d2b70b79767). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 585.258653] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cd33379a-4e57-4d6d-8e98-41e85e3c6107 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.268077] env[68217]: DEBUG nova.scheduler.client.report [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 585.275641] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Created folder: Project (ed6f1560fc474756af7a9d2b70b79767) in parent group-v594094. [ 585.275851] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Creating folder: Instances. Parent ref: group-v594121. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 585.276121] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-727303f9-860b-4549-9c32-3b5cd5ed797e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.290502] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Created folder: Instances in parent group-v594121. [ 585.290502] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 585.290655] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 585.290863] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3ab17d19-c880-4f07-b119-afabae15fd12 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.309486] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 585.309486] env[68217]: value = "task-2960599" [ 585.309486] env[68217]: _type = "Task" [ 585.309486] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.319120] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960599, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.503815] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': task-2960594, 'name': Rename_Task, 'duration_secs': 0.326654} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.503815] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 585.503959] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-810236b2-3684-4f5a-bc7f-4ed23fcea9aa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.515913] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Waiting for the task: (returnval){ [ 585.515913] env[68217]: value = "task-2960600" [ 585.515913] env[68217]: _type = "Task" [ 585.515913] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.525529] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': task-2960600, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.541930] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960595, 'name': PowerOnVM_Task} progress is 1%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.601036] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960593, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.81759} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.601479] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 00d2302b-84d4-42d8-94c7-caf45b925ddf/00d2302b-84d4-42d8-94c7-caf45b925ddf.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 585.601839] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 585.602226] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-865d410c-127f-4cda-aa9e-bd748ba0026a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.612385] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 585.612385] env[68217]: value = "task-2960601" [ 585.612385] env[68217]: _type = "Task" [ 585.612385] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.622829] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960601, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.636389] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960596, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.657130] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526d7abd-38fc-06f0-c402-587a5e2c832c, 'name': SearchDatastore_Task, 'duration_secs': 0.064508} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.657633] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 585.658033] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 14c8e8e6-5d7f-45b4-8a84-d5951c38573f/14c8e8e6-5d7f-45b4-8a84-d5951c38573f.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 585.658454] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 585.658759] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 585.659189] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2980980-d742-4cd3-be40-3e11557f6c46 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.661587] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-57e08790-0f01-4566-aa09-1af267766c00 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.675026] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 585.675026] env[68217]: value = "task-2960602" [ 585.675026] env[68217]: _type = "Task" [ 585.675026] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.679062] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 585.679445] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 585.680739] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f06f021-1d47-4fa5-a613-67a76f6e07c9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.686552] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960602, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.693983] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Waiting for the task: (returnval){ [ 585.693983] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a5c6d2-3f8f-6144-1d55-3959eb4d1eda" [ 585.693983] env[68217]: _type = "Task" [ 585.693983] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.704156] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a5c6d2-3f8f-6144-1d55-3959eb4d1eda, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.774084] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.625s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 585.775945] env[68217]: DEBUG nova.compute.manager [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 585.779357] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.596s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 585.826507] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960599, 'name': CreateVM_Task, 'duration_secs': 0.36272} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 585.826700] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 585.827163] env[68217]: DEBUG oslo_concurrency.lockutils [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.827326] env[68217]: DEBUG oslo_concurrency.lockutils [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 585.827651] env[68217]: DEBUG oslo_concurrency.lockutils [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 585.829724] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bed0a26d-ab70-41ae-aad3-7dbaf90734ad {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.835284] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 585.835284] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52077a24-d29e-02d3-727b-9fb61a69226e" [ 585.835284] env[68217]: _type = "Task" [ 585.835284] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 585.849806] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52077a24-d29e-02d3-727b-9fb61a69226e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 585.993562] env[68217]: DEBUG oslo_concurrency.lockutils [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Acquiring lock "71dd4921-5859-421f-9e31-e9800adc9e3c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 585.994927] env[68217]: DEBUG oslo_concurrency.lockutils [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Lock "71dd4921-5859-421f-9e31-e9800adc9e3c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 585.997065] env[68217]: DEBUG oslo_concurrency.lockutils [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Acquiring lock "71dd4921-5859-421f-9e31-e9800adc9e3c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 585.997065] env[68217]: DEBUG oslo_concurrency.lockutils [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Lock "71dd4921-5859-421f-9e31-e9800adc9e3c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 585.997065] env[68217]: DEBUG oslo_concurrency.lockutils [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Lock "71dd4921-5859-421f-9e31-e9800adc9e3c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 585.998071] env[68217]: INFO nova.compute.manager [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Terminating instance [ 586.028130] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': task-2960600, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.042435] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960595, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.050834] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Acquiring lock "db4cf157-9511-423c-aa41-433af8d92b48" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 586.051488] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Lock "db4cf157-9511-423c-aa41-433af8d92b48" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 586.129390] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960601, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088296} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.132762] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 586.133770] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0311a5a-d48a-482d-b06a-f31d3ea5a7e0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.147527] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960596, 'name': ReconfigVM_Task, 'duration_secs': 0.566691} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.157042] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Reconfigured VM instance instance-00000007 to attach disk [datastore2] 9ac81867-311c-42f3-b38f-67dc10f409c0/9ac81867-311c-42f3-b38f-67dc10f409c0.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 586.175870] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Reconfiguring VM instance instance-0000000a to attach disk [datastore2] 00d2302b-84d4-42d8-94c7-caf45b925ddf/00d2302b-84d4-42d8-94c7-caf45b925ddf.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 586.175870] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23f77b98-de26-4d6e-8dce-898aa94cf368 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.179461] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73d1a23a-84bb-47a9-b079-86c2e05450fa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.211241] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 586.211241] env[68217]: value = "task-2960603" [ 586.211241] env[68217]: _type = "Task" [ 586.211241] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.221779] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a5c6d2-3f8f-6144-1d55-3959eb4d1eda, 'name': SearchDatastore_Task, 'duration_secs': 0.012179} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.222972] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960602, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.225248] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 586.225248] env[68217]: value = "task-2960604" [ 586.225248] env[68217]: _type = "Task" [ 586.225248] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.225248] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5ad8c11-3f14-4e7f-a710-ccb9e7bf1b59 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.235065] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960603, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.239652] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Waiting for the task: (returnval){ [ 586.239652] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524e628e-5f1f-6e8b-8ede-e154f962d833" [ 586.239652] env[68217]: _type = "Task" [ 586.239652] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.247106] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960604, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.253623] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524e628e-5f1f-6e8b-8ede-e154f962d833, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.282841] env[68217]: DEBUG nova.compute.utils [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 586.293767] env[68217]: DEBUG nova.compute.manager [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Not allocating networking since 'none' was specified. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 586.352773] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52077a24-d29e-02d3-727b-9fb61a69226e, 'name': SearchDatastore_Task, 'duration_secs': 0.019655} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.353819] env[68217]: DEBUG oslo_concurrency.lockutils [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 586.353819] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 586.353819] env[68217]: DEBUG oslo_concurrency.lockutils [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.353819] env[68217]: DEBUG oslo_concurrency.lockutils [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 586.354152] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 586.354701] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b6aa10b-830f-4f58-9187-937526e6c632 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.373611] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 586.373693] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 586.375033] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c06711f-eb97-49d0-93f4-c4c9d8fb0a02 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.383348] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 586.383348] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5254f554-6187-72b1-a05d-ff170b046523" [ 586.383348] env[68217]: _type = "Task" [ 586.383348] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.396684] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5254f554-6187-72b1-a05d-ff170b046523, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.506847] env[68217]: DEBUG nova.compute.manager [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 586.507094] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 586.507972] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b96e4b3c-3b03-4144-a247-4d27e5d18d99 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.517254] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 586.520518] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0b4f1349-bb3f-4354-a247-9f01695d819f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.528749] env[68217]: DEBUG oslo_vmware.api [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': task-2960600, 'name': PowerOnVM_Task, 'duration_secs': 0.652623} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.530201] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 586.530882] env[68217]: INFO nova.compute.manager [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Took 14.00 seconds to spawn the instance on the hypervisor. [ 586.530882] env[68217]: DEBUG nova.compute.manager [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 586.531848] env[68217]: DEBUG oslo_vmware.api [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Waiting for the task: (returnval){ [ 586.531848] env[68217]: value = "task-2960605" [ 586.531848] env[68217]: _type = "Task" [ 586.531848] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.531848] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d9b00de-7c93-4722-bbcb-415822c14426 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.547413] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960595, 'name': PowerOnVM_Task} progress is 75%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.553776] env[68217]: DEBUG oslo_vmware.api [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': task-2960605, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.691526] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960602, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.721845} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.691950] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 14c8e8e6-5d7f-45b4-8a84-d5951c38573f/14c8e8e6-5d7f-45b4-8a84-d5951c38573f.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 586.694820] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 586.694820] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fdeea5de-7342-4f14-ae7e-762cfdc71c76 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.703957] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 586.703957] env[68217]: value = "task-2960606" [ 586.703957] env[68217]: _type = "Task" [ 586.703957] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.715424] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960606, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.724991] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960603, 'name': Rename_Task, 'duration_secs': 0.276696} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.725343] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 586.725657] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-de3dcaae-cf60-4aa9-a50d-74b82ba4ad13 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.734427] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 586.734427] env[68217]: value = "task-2960607" [ 586.734427] env[68217]: _type = "Task" [ 586.734427] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.741474] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960604, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.749252] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960607, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.756237] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524e628e-5f1f-6e8b-8ede-e154f962d833, 'name': SearchDatastore_Task, 'duration_secs': 0.059231} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.756237] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 586.756528] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] b0b21c65-ef3d-4492-a6b2-d2321a3dacde/b0b21c65-ef3d-4492-a6b2-d2321a3dacde.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 586.756892] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c919742e-b8d9-45ad-8a37-f59853be1f8f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.767626] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Waiting for the task: (returnval){ [ 586.767626] env[68217]: value = "task-2960608" [ 586.767626] env[68217]: _type = "Task" [ 586.767626] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.780280] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': task-2960608, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.788021] env[68217]: DEBUG nova.compute.manager [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 586.845013] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 586.845407] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 586.845451] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 71dd4921-5859-421f-9e31-e9800adc9e3c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 586.845693] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 7056fb29-2a2f-4275-a411-4d5f3fcb421f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 586.897788] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5254f554-6187-72b1-a05d-ff170b046523, 'name': SearchDatastore_Task, 'duration_secs': 0.020818} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 586.898883] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3430a737-31a0-4cad-a9e7-b5d2a19d6b36 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 586.907444] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 586.907444] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f26232-18cb-6d4d-220c-102c75b99cba" [ 586.907444] env[68217]: _type = "Task" [ 586.907444] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 586.916962] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f26232-18cb-6d4d-220c-102c75b99cba, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 586.929310] env[68217]: DEBUG nova.compute.manager [req-75067bcc-8a6e-415e-aa5a-d5313df3bb5c req-486e83d3-91d6-4982-af14-70eee880e4d3 service nova] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Received event network-vif-plugged-67797938-23e4-4820-a467-727dfd4fca29 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 586.929310] env[68217]: DEBUG oslo_concurrency.lockutils [req-75067bcc-8a6e-415e-aa5a-d5313df3bb5c req-486e83d3-91d6-4982-af14-70eee880e4d3 service nova] Acquiring lock "14c8e8e6-5d7f-45b4-8a84-d5951c38573f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 586.929310] env[68217]: DEBUG oslo_concurrency.lockutils [req-75067bcc-8a6e-415e-aa5a-d5313df3bb5c req-486e83d3-91d6-4982-af14-70eee880e4d3 service nova] Lock "14c8e8e6-5d7f-45b4-8a84-d5951c38573f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 586.929310] env[68217]: DEBUG oslo_concurrency.lockutils [req-75067bcc-8a6e-415e-aa5a-d5313df3bb5c req-486e83d3-91d6-4982-af14-70eee880e4d3 service nova] Lock "14c8e8e6-5d7f-45b4-8a84-d5951c38573f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 586.929310] env[68217]: DEBUG nova.compute.manager [req-75067bcc-8a6e-415e-aa5a-d5313df3bb5c req-486e83d3-91d6-4982-af14-70eee880e4d3 service nova] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] No waiting events found dispatching network-vif-plugged-67797938-23e4-4820-a467-727dfd4fca29 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 586.929721] env[68217]: WARNING nova.compute.manager [req-75067bcc-8a6e-415e-aa5a-d5313df3bb5c req-486e83d3-91d6-4982-af14-70eee880e4d3 service nova] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Received unexpected event network-vif-plugged-67797938-23e4-4820-a467-727dfd4fca29 for instance with vm_state building and task_state spawning. [ 586.929721] env[68217]: DEBUG nova.compute.manager [req-75067bcc-8a6e-415e-aa5a-d5313df3bb5c req-486e83d3-91d6-4982-af14-70eee880e4d3 service nova] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Received event network-changed-67797938-23e4-4820-a467-727dfd4fca29 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 586.929721] env[68217]: DEBUG nova.compute.manager [req-75067bcc-8a6e-415e-aa5a-d5313df3bb5c req-486e83d3-91d6-4982-af14-70eee880e4d3 service nova] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Refreshing instance network info cache due to event network-changed-67797938-23e4-4820-a467-727dfd4fca29. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 586.929721] env[68217]: DEBUG oslo_concurrency.lockutils [req-75067bcc-8a6e-415e-aa5a-d5313df3bb5c req-486e83d3-91d6-4982-af14-70eee880e4d3 service nova] Acquiring lock "refresh_cache-14c8e8e6-5d7f-45b4-8a84-d5951c38573f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.929721] env[68217]: DEBUG oslo_concurrency.lockutils [req-75067bcc-8a6e-415e-aa5a-d5313df3bb5c req-486e83d3-91d6-4982-af14-70eee880e4d3 service nova] Acquired lock "refresh_cache-14c8e8e6-5d7f-45b4-8a84-d5951c38573f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 586.929934] env[68217]: DEBUG nova.network.neutron [req-75067bcc-8a6e-415e-aa5a-d5313df3bb5c req-486e83d3-91d6-4982-af14-70eee880e4d3 service nova] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Refreshing network info cache for port 67797938-23e4-4820-a467-727dfd4fca29 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 587.044698] env[68217]: DEBUG oslo_vmware.api [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960595, 'name': PowerOnVM_Task, 'duration_secs': 1.87526} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.052599] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 587.052824] env[68217]: INFO nova.compute.manager [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Took 16.93 seconds to spawn the instance on the hypervisor. [ 587.053015] env[68217]: DEBUG nova.compute.manager [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 587.053881] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c16128f-be47-4a56-b314-5f68c473c062 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.068331] env[68217]: DEBUG oslo_vmware.api [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': task-2960605, 'name': PowerOffVM_Task, 'duration_secs': 0.365893} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.075007] env[68217]: INFO nova.compute.manager [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Took 27.39 seconds to build instance. 
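
The event records above show Nova's external-event handling for port 67797938-23e4-4820-a467-727dfd4fca29: the service takes a per-instance "<uuid>-events" lock while popping the network-vif-plugged event, then takes "refresh_cache-<uuid>" while rebuilding the Neutron network info cache. Both are oslo.concurrency named locks, which is what produces the "Acquiring lock" / "acquired" / "released" trio in the log. A minimal sketch of that locking pattern follows; the function and its body are illustrative placeholders, not Nova's actual code:

    from oslo_concurrency import lockutils

    def handle_external_event(instance_uuid, event):
        # Matches the "<uuid>-events" lock in the log: serialize access to the
        # per-instance event list while one waiting event is popped and dispatched.
        with lockutils.lock("%s-events" % instance_uuid):
            pass  # pop the waiting event, if any (illustrative only)

        # Matches the "refresh_cache-<uuid>" lock: hold it while the instance
        # network info cache is refreshed from Neutron after a network-changed event.
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            pass  # query Neutron and update instance_info_cache (illustrative only)
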
[ 587.078752] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 587.078752] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 587.078752] env[68217]: DEBUG nova.compute.manager [req-4a88a9b4-0ba6-4eb5-84ff-c8bf887550a7 req-9b96dfbe-0903-4608-a2db-3c8f2d93aaa7 service nova] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Received event network-vif-deleted-87dc0f7f-05f9-47ef-a033-652d923dccac {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 587.078752] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c379d1b5-a610-4028-ad32-cb530cf7871b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.171372] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 587.171639] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 587.171813] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Deleting the datastore file [datastore1] 71dd4921-5859-421f-9e31-e9800adc9e3c {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 587.172127] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7bd5cdf-662c-4ddc-8fc6-b08cba5f565b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.185375] env[68217]: DEBUG oslo_vmware.api [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Waiting for the task: (returnval){ [ 587.185375] env[68217]: value = "task-2960610" [ 587.185375] env[68217]: _type = "Task" [ 587.185375] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.195835] env[68217]: DEBUG oslo_vmware.api [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': task-2960610, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.218397] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960606, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.142667} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.218729] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 587.219524] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74de052d-dffd-4aa9-a0bd-ae66f1271a9a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.249779] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Reconfiguring VM instance instance-00000009 to attach disk [datastore2] 14c8e8e6-5d7f-45b4-8a84-d5951c38573f/14c8e8e6-5d7f-45b4-8a84-d5951c38573f.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 587.257139] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-649605e9-b080-4963-b930-24f25c85ac82 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.289088] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960604, 'name': ReconfigVM_Task, 'duration_secs': 0.632792} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.289088] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960607, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.290237] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Reconfigured VM instance instance-0000000a to attach disk [datastore2] 00d2302b-84d4-42d8-94c7-caf45b925ddf/00d2302b-84d4-42d8-94c7-caf45b925ddf.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 587.290960] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 587.290960] env[68217]: value = "task-2960611" [ 587.290960] env[68217]: _type = "Task" [ 587.290960] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.291236] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8c34676-cef8-4758-b518-24474e586348 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.299778] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': task-2960608, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.307453] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 587.307453] env[68217]: value = "task-2960612" [ 587.307453] env[68217]: _type = "Task" [ 587.307453] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.311622] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960611, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.323314] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960612, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.351146] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 678acc61-1c94-4152-b4e8-7569ab169ab9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 587.420450] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f26232-18cb-6d4d-220c-102c75b99cba, 'name': SearchDatastore_Task, 'duration_secs': 0.016045} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.420827] env[68217]: DEBUG oslo_concurrency.lockutils [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 587.421169] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] ae5fa3f4-e487-40ed-9ca4-12a6f9713eba/ae5fa3f4-e487-40ed-9ca4-12a6f9713eba.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 587.421513] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b1fa16be-5361-4100-b071-82719b64b81e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.431440] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 587.431440] env[68217]: value = "task-2960613" [ 587.431440] env[68217]: _type = "Task" [ 587.431440] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.443537] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960613, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.586884] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02ab6f4e-cf7d-400c-b2f7-6f868a85bb99 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Lock "7056fb29-2a2f-4275-a411-4d5f3fcb421f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.909s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 587.589079] env[68217]: INFO nova.compute.manager [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Took 28.93 seconds to build instance. 
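
The SearchDatastore_Task and CopyVirtualDisk_Task records above, together with the repeated "Waiting for the task" and "progress is N%" lines, reflect the oslo.vmware call-and-wait pattern: a vSphere method is invoked through the session, a task reference comes back, and the session polls it until completion. A minimal sketch under placeholder credentials and datastore paths; the endpoint, the paths, and the reduced CopyVirtualDisk_Task argument list are assumptions for illustration, not values taken from this log:

    from oslo_vmware import api as vmware_api

    # Placeholder endpoint/credentials; task_poll_interval drives the cadence of
    # the "_poll_task ... progress is N%" records seen above (assumed values).
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'svc-user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # invoke_api() issues the vSphere call and returns a task reference;
    # wait_for_task() blocks, polling until SUCCESS or raising on error.
    task = session.invoke_api(
        session.vim, 'CopyVirtualDisk_Task',
        session.vim.service_content.virtualDiskManager,
        sourceName='[datastore1] devstack-image-cache_base/image.vmdk',
        destName='[datastore1] instance-uuid/instance-uuid.vmdk')
    session.wait_for_task(task)
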
[ 587.699814] env[68217]: DEBUG oslo_vmware.api [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Task: {'id': task-2960610, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.272392} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.699814] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 587.699814] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 587.699814] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 587.700539] env[68217]: INFO nova.compute.manager [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Took 1.19 seconds to destroy the instance on the hypervisor. [ 587.700539] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 587.700539] env[68217]: DEBUG nova.compute.manager [-] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 587.700539] env[68217]: DEBUG nova.network.neutron [-] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 587.748717] env[68217]: DEBUG oslo_vmware.api [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960607, 'name': PowerOnVM_Task, 'duration_secs': 0.786164} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.749456] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 587.749456] env[68217]: INFO nova.compute.manager [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Took 12.71 seconds to spawn the instance on the hypervisor. [ 587.749456] env[68217]: DEBUG nova.compute.manager [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 587.750175] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0790405f-db04-4645-b237-c1825e2d1753 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.791463] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': task-2960608, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584727} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.791809] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] b0b21c65-ef3d-4492-a6b2-d2321a3dacde/b0b21c65-ef3d-4492-a6b2-d2321a3dacde.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 587.792034] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 587.792291] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e1c032df-9556-43b6-94e9-c1a6a4bbaa36 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.806094] env[68217]: DEBUG nova.compute.manager [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 587.809683] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960611, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.813242] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Waiting for the task: (returnval){ [ 587.813242] env[68217]: value = "task-2960614" [ 587.813242] env[68217]: _type = "Task" [ 587.813242] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.826328] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': task-2960614, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.830180] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960612, 'name': Rename_Task, 'duration_secs': 0.173865} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 587.830512] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 587.830827] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-77b073c7-726c-48a8-81a1-ea988389f36a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.842079] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 587.842079] env[68217]: value = "task-2960615" [ 587.842079] env[68217]: _type = "Task" [ 587.842079] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.847670] env[68217]: DEBUG nova.virt.hardware [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 587.847849] env[68217]: DEBUG nova.virt.hardware [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 587.847986] env[68217]: DEBUG nova.virt.hardware [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 587.848189] env[68217]: DEBUG nova.virt.hardware [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 587.848321] env[68217]: DEBUG nova.virt.hardware [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 587.848474] env[68217]: DEBUG nova.virt.hardware [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 587.848655] env[68217]: DEBUG nova.virt.hardware [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 587.848834] env[68217]: DEBUG nova.virt.hardware [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 587.848996] env[68217]: DEBUG nova.virt.hardware [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 587.849173] env[68217]: DEBUG nova.virt.hardware [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 587.849339] env[68217]: DEBUG nova.virt.hardware [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 587.850354] env[68217]: DEBUG nova.network.neutron [req-75067bcc-8a6e-415e-aa5a-d5313df3bb5c req-486e83d3-91d6-4982-af14-70eee880e4d3 service nova] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Updated VIF entry in instance network info cache for port 67797938-23e4-4820-a467-727dfd4fca29. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 587.850736] env[68217]: DEBUG nova.network.neutron [req-75067bcc-8a6e-415e-aa5a-d5313df3bb5c req-486e83d3-91d6-4982-af14-70eee880e4d3 service nova] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Updating instance_info_cache with network_info: [{"id": "67797938-23e4-4820-a467-727dfd4fca29", "address": "fa:16:3e:9e:b9:92", "network": {"id": "3f8eb120-19fa-420f-a14b-3cf960a6fe58", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1021764887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cff35c33460c4a50ae6bee636d950504", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67797938-23", "ovs_interfaceid": "67797938-23e4-4820-a467-727dfd4fca29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.852497] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb64508-81d4-4fbe-abd3-0cbe2b2f35a9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.858228] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 83d32dd6-2629-4451-a746-bf5270083e2a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance 
has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 587.866992] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517f19ac-2a49-451d-a7f4-b5789d45e3ce {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.873137] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960615, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.888262] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Instance VIF info [] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 587.894147] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Creating folder: Project (0154b85d80aa4888887ae90be9f3a1a2). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 587.894914] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8f4fe730-2cb3-4b52-9bc8-530e7fd085c3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.914896] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Created folder: Project (0154b85d80aa4888887ae90be9f3a1a2) in parent group-v594094. [ 587.915215] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Creating folder: Instances. Parent ref: group-v594124. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 587.915579] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2e614a40-8626-4d60-bda8-c603cbe4fcfd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.946138] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Created folder: Instances in parent group-v594124. [ 587.946575] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 587.947393] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 587.947949] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eee46a01-7d5c-4d07-9be9-da989ccb0c83 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.971021] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960613, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 587.976920] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 587.976920] env[68217]: value = "task-2960618" [ 587.976920] env[68217]: _type = "Task" [ 587.976920] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 587.989788] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960618, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.093179] env[68217]: DEBUG nova.compute.manager [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 588.096595] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b3e83045-6c06-4cf8-99e3-6721f39bb9bb tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "4f4dc254-8e4f-4c5f-a2a8-eef6230825c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.450s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 588.279668] env[68217]: INFO nova.compute.manager [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Took 28.08 seconds to build instance. [ 588.310020] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960611, 'name': ReconfigVM_Task, 'duration_secs': 0.648598} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.310020] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Reconfigured VM instance instance-00000009 to attach disk [datastore2] 14c8e8e6-5d7f-45b4-8a84-d5951c38573f/14c8e8e6-5d7f-45b4-8a84-d5951c38573f.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 588.310020] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1c036499-5cf5-4cce-860e-e522196768ff {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.319525] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 588.319525] env[68217]: value = "task-2960619" [ 588.319525] env[68217]: _type = "Task" [ 588.319525] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.329995] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': task-2960614, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.153514} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.330729] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 588.331650] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bfd93ef-b214-4e14-b954-90ed6680b541 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.342630] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960619, 'name': Rename_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.359733] env[68217]: DEBUG oslo_concurrency.lockutils [req-75067bcc-8a6e-415e-aa5a-d5313df3bb5c req-486e83d3-91d6-4982-af14-70eee880e4d3 service nova] Releasing lock "refresh_cache-14c8e8e6-5d7f-45b4-8a84-d5951c38573f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 588.359966] env[68217]: DEBUG nova.compute.manager [req-75067bcc-8a6e-415e-aa5a-d5313df3bb5c req-486e83d3-91d6-4982-af14-70eee880e4d3 service nova] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Received event network-vif-deleted-446b7b15-f602-4a41-b415-19e2cff8535a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 588.360756] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 588.371576] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Reconfiguring VM instance instance-00000008 to attach disk [datastore2] b0b21c65-ef3d-4492-a6b2-d2321a3dacde/b0b21c65-ef3d-4492-a6b2-d2321a3dacde.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 588.375410] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8ca1ed5-595e-4b0c-9412-acfc73a1620b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.391145] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960615, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.401402] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Waiting for the task: (returnval){ [ 588.401402] env[68217]: value = "task-2960620" [ 588.401402] env[68217]: _type = "Task" [ 588.401402] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.412426] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': task-2960620, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.450675] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960613, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.79865} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.451272] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] ae5fa3f4-e487-40ed-9ca4-12a6f9713eba/ae5fa3f4-e487-40ed-9ca4-12a6f9713eba.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 588.451411] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 588.451649] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a2712095-00a3-4296-a81a-2f734f10cf8f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.463854] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 588.463854] env[68217]: value = "task-2960621" [ 588.463854] env[68217]: _type = "Task" [ 588.463854] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.474078] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960621, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.496703] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960618, 'name': CreateVM_Task, 'duration_secs': 0.445683} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.497472] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 588.497472] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.497631] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 588.497945] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 588.498226] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8a480bd-aba3-4cc8-9fd0-37da19ef494e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.506254] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Waiting for the task: (returnval){ [ 588.506254] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]523564ef-8301-1a0a-f28d-9a5763f04b0f" [ 588.506254] env[68217]: _type = "Task" [ 588.506254] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.524142] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523564ef-8301-1a0a-f28d-9a5763f04b0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.601445] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 588.634018] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 588.782742] env[68217]: DEBUG nova.network.neutron [-] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.784175] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4685c39-9da2-4958-8551-673cff9a97dd tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "9ac81867-311c-42f3-b38f-67dc10f409c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.595s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 588.830847] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960619, 'name': Rename_Task, 'duration_secs': 0.243143} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.831476] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 588.831886] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cc6a9fee-553a-4495-8358-4542abeee43f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.840430] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 588.840430] env[68217]: value = "task-2960622" [ 588.840430] env[68217]: _type = "Task" [ 588.840430] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.850157] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960622, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.855622] env[68217]: DEBUG oslo_vmware.api [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960615, 'name': PowerOnVM_Task, 'duration_secs': 0.694906} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.855885] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 588.856097] env[68217]: INFO nova.compute.manager [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Took 6.18 seconds to spawn the instance on the hypervisor. [ 588.856291] env[68217]: DEBUG nova.compute.manager [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 588.857103] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9c5455-dca8-46f2-a477-44666901f9e3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.876212] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 63e0fc9e-5182-4781-b007-69e2134718df has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 588.911243] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': task-2960620, 'name': ReconfigVM_Task, 'duration_secs': 0.446881} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.911818] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Reconfigured VM instance instance-00000008 to attach disk [datastore2] b0b21c65-ef3d-4492-a6b2-d2321a3dacde/b0b21c65-ef3d-4492-a6b2-d2321a3dacde.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 588.912874] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bdb0c634-6e09-446b-992c-27866caf5a77 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.921632] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Waiting for the task: (returnval){ [ 588.921632] env[68217]: value = "task-2960623" [ 588.921632] env[68217]: _type = "Task" [ 588.921632] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 588.935173] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': task-2960623, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 588.974268] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960621, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091519} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 588.974561] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 588.975357] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b98b0445-556e-46ba-bae0-a91eb579dbf1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.999078] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] ae5fa3f4-e487-40ed-9ca4-12a6f9713eba/ae5fa3f4-e487-40ed-9ca4-12a6f9713eba.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 588.999418] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7abc6427-fe66-4727-aac1-5b82faec7fd2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.028481] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523564ef-8301-1a0a-f28d-9a5763f04b0f, 'name': SearchDatastore_Task, 'duration_secs': 0.015778} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.029995] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 589.030340] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 589.030682] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 589.030910] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 589.031140] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 589.031471] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 589.031471] env[68217]: value = "task-2960624" [ 589.031471] env[68217]: _type = "Task" [ 589.031471] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.031702] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ec85ade0-3974-4a88-9f9b-26a400dc67c7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.043757] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960624, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.048856] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 589.048957] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 589.049674] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a79372b-627a-4168-9111-95b5ea41776e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.056509] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Waiting for the task: (returnval){ [ 589.056509] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]528020a3-72d7-9a10-6dc0-bd4c3e8b9eec" [ 589.056509] env[68217]: _type = "Task" [ 589.056509] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.066224] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528020a3-72d7-9a10-6dc0-bd4c3e8b9eec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.128736] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 589.289114] env[68217]: INFO nova.compute.manager [-] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Took 1.59 seconds to deallocate network for instance. [ 589.289823] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 589.351496] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960622, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.374741] env[68217]: INFO nova.compute.manager [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Took 23.65 seconds to build instance. [ 589.379497] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 589.379658] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 471e8a27-ed87-461a-b817-cd5ad208dd10 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 589.379785] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 00d2302b-84d4-42d8-94c7-caf45b925ddf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 589.438461] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': task-2960623, 'name': Rename_Task, 'duration_secs': 0.36148} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.439059] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 589.439536] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0c6a7b04-e7b9-4bae-a1ba-142324038d14 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.455798] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Waiting for the task: (returnval){ [ 589.455798] env[68217]: value = "task-2960625" [ 589.455798] env[68217]: _type = "Task" [ 589.455798] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.471032] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': task-2960625, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.553540] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960624, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.572288] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528020a3-72d7-9a10-6dc0-bd4c3e8b9eec, 'name': SearchDatastore_Task, 'duration_secs': 0.020187} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.572288] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49273621-e404-45bf-b4b2-9a3acc51be55 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.582126] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Waiting for the task: (returnval){ [ 589.582126] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fa110e-22e6-2f25-8201-43b5e7a47ba1" [ 589.582126] env[68217]: _type = "Task" [ 589.582126] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 589.597484] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fa110e-22e6-2f25-8201-43b5e7a47ba1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 589.801625] env[68217]: DEBUG oslo_concurrency.lockutils [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 589.826503] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 589.851643] env[68217]: DEBUG oslo_vmware.api [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960622, 'name': PowerOnVM_Task, 'duration_secs': 0.898151} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 589.851982] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 589.852289] env[68217]: INFO nova.compute.manager [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Took 9.73 seconds to spawn the instance on the hypervisor. [ 589.852445] env[68217]: DEBUG nova.compute.manager [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 589.853871] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3749cd6-f018-41a8-b389-ca62e610e3b9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.877298] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db956c30-1603-4496-8024-3238ddf9ca18 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "00d2302b-84d4-42d8-94c7-caf45b925ddf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.188s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 589.882092] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 93b49e91-5e9a-4b11-a833-31ab0883e0e8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 589.970701] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': task-2960625, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.016080] env[68217]: DEBUG nova.compute.manager [req-af631c69-e592-48e4-aea8-3524a96c5504 req-e4075ac3-083b-4635-a14d-54e1abcbcff8 service nova] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Received event network-vif-deleted-bfc8f377-ffbf-460c-bbb3-ae9af4f22fe8 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 590.054107] env[68217]: DEBUG oslo_concurrency.lockutils [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Acquiring lock "17bea068-7d7a-4a87-8b27-91a7efcd45c5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 590.054407] env[68217]: DEBUG oslo_concurrency.lockutils [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Lock "17bea068-7d7a-4a87-8b27-91a7efcd45c5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 590.054646] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960624, 'name': ReconfigVM_Task, 'duration_secs': 0.668798} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.054903] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Reconfigured VM instance instance-0000000b to attach disk [datastore1] ae5fa3f4-e487-40ed-9ca4-12a6f9713eba/ae5fa3f4-e487-40ed-9ca4-12a6f9713eba.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 590.055620] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-69b77b27-1416-401a-8294-22a252d96edb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.065256] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 590.065256] env[68217]: value = "task-2960626" [ 590.065256] env[68217]: _type = "Task" [ 590.065256] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.075707] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960626, 'name': Rename_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.093084] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fa110e-22e6-2f25-8201-43b5e7a47ba1, 'name': SearchDatastore_Task, 'duration_secs': 0.018761} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.093350] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 590.093599] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5/0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 590.093922] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d5ed35f-b81d-4eba-be59-36af3c56064c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.102743] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Waiting for the task: (returnval){ [ 590.102743] env[68217]: value = "task-2960627" [ 590.102743] env[68217]: _type = "Task" [ 590.102743] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.112437] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960627, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.382182] env[68217]: INFO nova.compute.manager [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Took 28.12 seconds to build instance. [ 590.383513] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 590.389791] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance bbd282ea-58aa-47b8-aa82-283a55ac1b29 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 590.471832] env[68217]: DEBUG oslo_vmware.api [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': task-2960625, 'name': PowerOnVM_Task, 'duration_secs': 0.804901} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.473490] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 590.473970] env[68217]: INFO nova.compute.manager [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Took 12.95 seconds to spawn the instance on the hypervisor. [ 590.474454] env[68217]: DEBUG nova.compute.manager [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 590.475665] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfa3e205-9540-47b9-8d26-b5fd1ff6d862 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.584741] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960626, 'name': Rename_Task, 'duration_secs': 0.210238} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 590.585807] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 590.586302] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a981baf-8a95-4a13-99c0-016c06e3c408 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.596972] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 590.596972] env[68217]: value = "task-2960628" [ 590.596972] env[68217]: _type = "Task" [ 590.596972] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 590.612860] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960628, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.616430] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960627, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 590.889745] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Acquiring lock "7056fb29-2a2f-4275-a411-4d5f3fcb421f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 590.889745] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Lock "7056fb29-2a2f-4275-a411-4d5f3fcb421f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 590.889745] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Acquiring lock "7056fb29-2a2f-4275-a411-4d5f3fcb421f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 590.890229] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Lock "7056fb29-2a2f-4275-a411-4d5f3fcb421f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 590.891971] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Lock "7056fb29-2a2f-4275-a411-4d5f3fcb421f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 590.893697] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b432e25a-8d50-4c30-b5f6-63fa837a05ad tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "14c8e8e6-5d7f-45b4-8a84-d5951c38573f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.644s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 590.895840] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance af11d05f-4432-4505-bb52-226414488960 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 590.897718] env[68217]: INFO nova.compute.manager [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Terminating instance [ 590.932564] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 591.010779] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquiring lock "fcddfd72-a130-4efc-82cb-1fb22d33d684" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 591.011851] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "fcddfd72-a130-4efc-82cb-1fb22d33d684" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 591.012337] env[68217]: INFO nova.compute.manager [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Took 29.78 seconds to build instance. [ 591.112153] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960628, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.120681] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960627, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.589973} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.120989] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5/0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 591.121251] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 591.121999] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f9c315f1-eedf-4b4d-a807-22a4279a9b81 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.132876] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Waiting for the task: (returnval){ [ 591.132876] env[68217]: value = "task-2960629" [ 591.132876] env[68217]: _type = "Task" [ 591.132876] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.143769] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960629, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.398730] env[68217]: DEBUG nova.compute.manager [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 591.406372] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance dfeeed37-8c84-4ecc-87ea-f4239f512fb1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 591.406372] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance cdc84742-e20a-4e48-bfff-b3ac34405c1d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 591.406573] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 11f9c054-62b9-4ac9-9651-5c85e7a86663 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 591.406573] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 14c8e8e6-5d7f-45b4-8a84-d5951c38573f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 591.407415] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 9ac81867-311c-42f3-b38f-67dc10f409c0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 591.407415] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance b0b21c65-ef3d-4492-a6b2-d2321a3dacde actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 591.412580] env[68217]: DEBUG nova.compute.manager [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 591.412853] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 591.414726] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5802348-3888-4b3f-a79d-8ce61dca41ca {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.426473] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 591.427745] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a0ee837-0a5d-49d3-9307-be07c7f95f63 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.444705] env[68217]: DEBUG oslo_vmware.api [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Waiting for the task: (returnval){ [ 591.444705] env[68217]: value = "task-2960630" [ 591.444705] env[68217]: _type = "Task" [ 591.444705] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.457243] env[68217]: DEBUG oslo_vmware.api [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': task-2960630, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.514785] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f64c0e5c-9645-4b1b-af04-3926a5c6a4d4 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Lock "b0b21c65-ef3d-4492-a6b2-d2321a3dacde" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.300s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 591.610930] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960628, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.645521] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960629, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083371} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.645835] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 591.647652] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdb9b6c8-f2bb-4dd1-b62d-350c79fc2f8c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.679833] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Reconfiguring VM instance instance-0000000c to attach disk [datastore1] 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5/0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 591.682815] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e01ca820-d398-4904-8d06-ae4d827dad84 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.714783] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Waiting for the task: (returnval){ [ 591.714783] env[68217]: value = "task-2960631" [ 591.714783] env[68217]: _type = "Task" [ 591.714783] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 591.725197] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960631, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 591.921306] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance db4cf157-9511-423c-aa41-433af8d92b48 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 591.921651] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance ae5fa3f4-e487-40ed-9ca4-12a6f9713eba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 591.921651] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 591.922067] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 591.948626] env[68217]: DEBUG oslo_concurrency.lockutils [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 591.963597] env[68217]: DEBUG oslo_vmware.api [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': task-2960630, 'name': PowerOffVM_Task, 'duration_secs': 0.457704} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 591.964022] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 591.964232] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 591.964534] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d7c56468-c7c6-4994-8d44-6848ecd46cc6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.024988] env[68217]: DEBUG nova.compute.manager [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 592.050430] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 592.050779] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 592.051272] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Deleting the datastore file [datastore2] 7056fb29-2a2f-4275-a411-4d5f3fcb421f {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 592.053935] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-478348ee-d9a8-4fd1-b7d6-45aedb2f0408 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.064422] env[68217]: DEBUG oslo_vmware.api [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Waiting for the task: (returnval){ [ 592.064422] env[68217]: value = "task-2960633" [ 592.064422] env[68217]: _type = "Task" [ 592.064422] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.077352] env[68217]: DEBUG oslo_vmware.api [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': task-2960633, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.111815] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960628, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.227058] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960631, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.371409] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c2b54f8-6b4c-45c8-b5ab-7ed9e1dc6092 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.379236] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d33ba66e-ae2a-4775-b9ba-526489fd490d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.414516] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b2dd24-cc73-4137-a0b4-b5a1a425b5a8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.425596] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29b9810-bda3-4284-a409-593785414502 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.447110] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.572949] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 592.579468] env[68217]: DEBUG oslo_vmware.api [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Task: {'id': task-2960633, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.301473} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.580116] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 592.580461] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 592.583019] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 592.583019] env[68217]: INFO nova.compute.manager [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Took 1.17 seconds to destroy the instance on the hypervisor. [ 592.583019] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 592.583019] env[68217]: DEBUG nova.compute.manager [-] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 592.583019] env[68217]: DEBUG nova.network.neutron [-] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 592.612631] env[68217]: DEBUG oslo_vmware.api [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960628, 'name': PowerOnVM_Task, 'duration_secs': 1.536865} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.614455] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 592.614455] env[68217]: INFO nova.compute.manager [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Took 7.43 seconds to spawn the instance on the hypervisor. 
[ 592.614455] env[68217]: DEBUG nova.compute.manager [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 592.615945] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39019fe2-6416-478e-a298-6f9cf72c74c9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.727400] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960631, 'name': ReconfigVM_Task, 'duration_secs': 0.521792} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 592.727763] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Reconfigured VM instance instance-0000000c to attach disk [datastore1] 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5/0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 592.730298] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6be3f652-1fbe-4a8a-9f0e-276265b088fb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.740921] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Waiting for the task: (returnval){ [ 592.740921] env[68217]: value = "task-2960634" [ 592.740921] env[68217]: _type = "Task" [ 592.740921] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 592.752716] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960634, 'name': Rename_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 592.951169] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 593.139168] env[68217]: INFO nova.compute.manager [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Took 15.87 seconds to build instance. [ 593.254513] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960634, 'name': Rename_Task, 'duration_secs': 0.196187} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 593.255081] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 593.255837] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0be35591-073e-4a18-a03b-0c4ecc9b2c2b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.265381] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Waiting for the task: (returnval){ [ 593.265381] env[68217]: value = "task-2960635" [ 593.265381] env[68217]: _type = "Task" [ 593.265381] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 593.276184] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960635, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.458018] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68217) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 593.458018] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.676s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 593.458018] env[68217]: DEBUG oslo_concurrency.lockutils [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.553s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 593.458018] env[68217]: INFO nova.compute.claims [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 593.593058] env[68217]: DEBUG nova.network.neutron [-] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.642176] env[68217]: DEBUG oslo_concurrency.lockutils [None req-227b5fb7-5305-4271-b07d-f5d298aa6edf tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Lock "ae5fa3f4-e487-40ed-9ca4-12a6f9713eba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.270s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 593.779140] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960635, 'name': PowerOnVM_Task} progress is 1%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 593.890965] env[68217]: DEBUG nova.compute.manager [None req-fc971604-e448-4fa6-b377-fd0a2c8a5ac6 tempest-ServerExternalEventsTest-2038345449 tempest-ServerExternalEventsTest-2038345449-project] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Received event network-changed {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 593.891269] env[68217]: DEBUG nova.compute.manager [None req-fc971604-e448-4fa6-b377-fd0a2c8a5ac6 tempest-ServerExternalEventsTest-2038345449 tempest-ServerExternalEventsTest-2038345449-project] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Refreshing instance network info cache due to event network-changed. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 593.891560] env[68217]: DEBUG oslo_concurrency.lockutils [None req-fc971604-e448-4fa6-b377-fd0a2c8a5ac6 tempest-ServerExternalEventsTest-2038345449 tempest-ServerExternalEventsTest-2038345449-project] Acquiring lock "refresh_cache-b0b21c65-ef3d-4492-a6b2-d2321a3dacde" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.892037] env[68217]: DEBUG oslo_concurrency.lockutils [None req-fc971604-e448-4fa6-b377-fd0a2c8a5ac6 tempest-ServerExternalEventsTest-2038345449 tempest-ServerExternalEventsTest-2038345449-project] Acquired lock "refresh_cache-b0b21c65-ef3d-4492-a6b2-d2321a3dacde" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 593.892396] env[68217]: DEBUG nova.network.neutron [None req-fc971604-e448-4fa6-b377-fd0a2c8a5ac6 tempest-ServerExternalEventsTest-2038345449 tempest-ServerExternalEventsTest-2038345449-project] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 594.097020] env[68217]: INFO nova.compute.manager [-] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Took 1.52 seconds to deallocate network for instance. [ 594.149639] env[68217]: DEBUG nova.compute.manager [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 594.257804] env[68217]: DEBUG nova.compute.manager [req-7e9c76a2-380d-4bad-b6bb-6614af6945a8 req-200cd96d-5237-4cf7-899c-389bbd4fdda3 service nova] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Received event network-vif-deleted-3f82f80c-ad2c-4e9a-a247-99c0d137f553 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 594.280993] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960635, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.369600] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Acquiring lock "f748cf37-6605-49a2-a418-51667a0fac4a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 594.369600] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Lock "f748cf37-6605-49a2-a418-51667a0fac4a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 594.610075] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 594.692818] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 594.775506] env[68217]: DEBUG nova.network.neutron [None req-fc971604-e448-4fa6-b377-fd0a2c8a5ac6 tempest-ServerExternalEventsTest-2038345449 tempest-ServerExternalEventsTest-2038345449-project] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Updating instance_info_cache with network_info: [{"id": "8a6a7f38-0ada-4d40-9405-e15fe9874407", "address": "fa:16:3e:e3:44:28", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.174", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8a6a7f38-0a", "ovs_interfaceid": "8a6a7f38-0ada-4d40-9405-e15fe9874407", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.788848] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec 
tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960635, 'name': PowerOnVM_Task} progress is 82%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.918287] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecb720cf-1641-42bc-aa3f-58fea0327ed5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.929017] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e9e7112-fa2a-43a9-8318-8a8819571515 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.965213] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f2c771-1b81-416e-be07-b8f80b4caf59 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.975543] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8742bd6d-0439-4dd6-8923-6e1e5f861675 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.991828] env[68217]: DEBUG nova.compute.provider_tree [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 595.281185] env[68217]: DEBUG oslo_concurrency.lockutils [None req-fc971604-e448-4fa6-b377-fd0a2c8a5ac6 tempest-ServerExternalEventsTest-2038345449 tempest-ServerExternalEventsTest-2038345449-project] Releasing lock "refresh_cache-b0b21c65-ef3d-4492-a6b2-d2321a3dacde" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 595.282659] env[68217]: DEBUG oslo_vmware.api [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960635, 'name': PowerOnVM_Task, 'duration_secs': 1.648432} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 595.282998] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 595.283147] env[68217]: INFO nova.compute.manager [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Took 7.48 seconds to spawn the instance on the hypervisor. 
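(Editorial sketch, hedged: the instance_info_cache update logged at 594.775506 carries the full Neutron port description for instance b0b21c65: port id 8a6a7f38-0ada-4d40-9405-e15fe9874407, MAC fa:16:3e:e3:44:28, fixed IP 192.168.233.174 on 192.168.233.0/24, MTU 8950, OVS binding details. As a rough illustration of how such a network_info list can be read, here is a small Python helper that pulls the fixed addresses out of a structure shaped like that entry; the literal below is a trimmed copy of the logged data, not an API call.)

    def fixed_ips(network_info):
        """Collect (port_id, address) pairs from a network_info-shaped list."""
        result = []
        for vif in network_info:
            for subnet in vif.get("network", {}).get("subnets", []):
                for ip in subnet.get("ips", []):
                    if ip.get("type") == "fixed":
                        result.append((vif.get("id"), ip.get("address")))
        return result

    # Trimmed example matching the cache entry logged above.
    example = [{
        "id": "8a6a7f38-0ada-4d40-9405-e15fe9874407",
        "address": "fa:16:3e:e3:44:28",
        "network": {"subnets": [{"cidr": "192.168.233.0/24",
                                 "ips": [{"address": "192.168.233.174",
                                          "type": "fixed"}]}]},
    }]

    print(fixed_ips(example))
    # [('8a6a7f38-0ada-4d40-9405-e15fe9874407', '192.168.233.174')]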
[ 595.283327] env[68217]: DEBUG nova.compute.manager [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 595.284190] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd89162-fa4a-4aed-b4b9-e926026af0b0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.320255] env[68217]: DEBUG oslo_concurrency.lockutils [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Acquiring lock "b0b21c65-ef3d-4492-a6b2-d2321a3dacde" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 595.320255] env[68217]: DEBUG oslo_concurrency.lockutils [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Lock "b0b21c65-ef3d-4492-a6b2-d2321a3dacde" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.320255] env[68217]: DEBUG oslo_concurrency.lockutils [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Acquiring lock "b0b21c65-ef3d-4492-a6b2-d2321a3dacde-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 595.320255] env[68217]: DEBUG oslo_concurrency.lockutils [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Lock "b0b21c65-ef3d-4492-a6b2-d2321a3dacde-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 595.320531] env[68217]: DEBUG oslo_concurrency.lockutils [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Lock "b0b21c65-ef3d-4492-a6b2-d2321a3dacde-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 595.324843] env[68217]: INFO nova.compute.manager [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Terminating instance [ 595.496463] env[68217]: DEBUG nova.scheduler.client.report [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': 
{'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 595.813882] env[68217]: INFO nova.compute.manager [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Took 18.19 seconds to build instance. [ 595.829539] env[68217]: DEBUG nova.compute.manager [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 595.829765] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 595.830834] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0ca44cb-29fb-4d42-9400-a47f880c5ca7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.844525] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 595.844686] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-04134cb2-bd96-4baa-a2de-1ff1e061057d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.854101] env[68217]: DEBUG oslo_vmware.api [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Waiting for the task: (returnval){ [ 595.854101] env[68217]: value = "task-2960636" [ 595.854101] env[68217]: _type = "Task" [ 595.854101] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.870534] env[68217]: DEBUG oslo_vmware.api [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': task-2960636, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.007222] env[68217]: DEBUG oslo_concurrency.lockutils [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.550s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 596.007222] env[68217]: DEBUG nova.compute.manager [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 596.012210] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.874s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.013966] env[68217]: INFO nova.compute.claims [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 596.316654] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7b1171ba-3075-4ead-937d-ac10548e4dec tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Lock "0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.574s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 596.365339] env[68217]: DEBUG oslo_vmware.api [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': task-2960636, 'name': PowerOffVM_Task, 'duration_secs': 0.310371} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.365608] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 596.365768] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 596.366331] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b5cf0224-342f-47f3-a37c-36de2fc51c72 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.443165] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 596.443165] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 596.443165] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Deleting the datastore file [datastore2] b0b21c65-ef3d-4492-a6b2-d2321a3dacde {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 596.443165] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42270112-cce7-473c-a170-81cfbb3cc37c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.458580] env[68217]: DEBUG oslo_vmware.api [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Waiting for the task: (returnval){ [ 596.458580] env[68217]: value = "task-2960638" [ 596.458580] env[68217]: _type = "Task" [ 596.458580] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.478046] env[68217]: DEBUG oslo_vmware.api [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': task-2960638, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 596.527855] env[68217]: DEBUG nova.compute.utils [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 596.529526] env[68217]: DEBUG nova.compute.manager [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 596.529626] env[68217]: DEBUG nova.network.neutron [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 596.682963] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Acquiring lock "8fcccac2-dae1-4af0-a2b2-787e1bb7c9be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 596.683623] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Lock "8fcccac2-dae1-4af0-a2b2-787e1bb7c9be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 596.770320] env[68217]: DEBUG nova.policy [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fbe6e6d71c634dc3acea422e3b7d434a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a7f44d56bcd7463093ba85df1def7774', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 596.820507] env[68217]: DEBUG nova.compute.manager [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 596.974047] env[68217]: DEBUG oslo_vmware.api [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Task: {'id': task-2960638, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.345474} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 596.974575] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 596.974853] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 596.976094] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 596.976471] env[68217]: INFO nova.compute.manager [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Took 1.15 seconds to destroy the instance on the hypervisor. [ 596.976951] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 596.977275] env[68217]: DEBUG nova.compute.manager [-] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 596.977548] env[68217]: DEBUG nova.network.neutron [-] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 597.035731] env[68217]: DEBUG nova.compute.manager [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 597.354843] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 597.448331] env[68217]: DEBUG nova.network.neutron [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Successfully created port: 8b1504f0-1a6e-4b49-8c7d-52cddf1b91e1 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 597.518814] env[68217]: DEBUG nova.compute.manager [None req-1489366d-2128-435f-abd1-5951786eb068 tempest-ServerDiagnosticsV248Test-317965423 tempest-ServerDiagnosticsV248Test-317965423-project-admin] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 597.523414] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39d92d81-327e-4220-b56d-2d5c3e480862 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.526916] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a70481-c549-40c8-b808-8a8ef5a0f3be {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.539854] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d125c684-dc26-4f7e-8bfa-3e91caf5ba0f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.548067] env[68217]: INFO nova.compute.manager [None req-1489366d-2128-435f-abd1-5951786eb068 tempest-ServerDiagnosticsV248Test-317965423 tempest-ServerDiagnosticsV248Test-317965423-project-admin] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Retrieving diagnostics [ 597.549602] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d1dfecd-2615-40a0-ad56-1ed21d4f35d2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.618980] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c4fddad-5b74-4a87-89fa-116cc83414ab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.628472] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49027a75-30ab-4261-ac4a-cbf79b55b08d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.647435] env[68217]: DEBUG nova.compute.provider_tree [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 598.055300] 
env[68217]: DEBUG nova.compute.manager [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 598.094584] env[68217]: DEBUG nova.virt.hardware [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 598.094837] env[68217]: DEBUG nova.virt.hardware [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 598.094993] env[68217]: DEBUG nova.virt.hardware [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 598.095199] env[68217]: DEBUG nova.virt.hardware [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 598.095343] env[68217]: DEBUG nova.virt.hardware [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 598.095486] env[68217]: DEBUG nova.virt.hardware [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 598.095685] env[68217]: DEBUG nova.virt.hardware [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 598.096213] env[68217]: DEBUG 
nova.virt.hardware [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 598.096213] env[68217]: DEBUG nova.virt.hardware [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 598.096932] env[68217]: DEBUG nova.virt.hardware [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 598.097110] env[68217]: DEBUG nova.virt.hardware [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 598.099873] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e681e259-9595-4a9b-9151-78e36d0165e7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.109956] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-553b853a-f882-4172-b326-51b228861f4b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.151869] env[68217]: DEBUG nova.network.neutron [-] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 598.153664] env[68217]: DEBUG nova.scheduler.client.report [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 598.657431] env[68217]: INFO nova.compute.manager [-] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Took 1.68 seconds to deallocate network for instance. 
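(Editorial sketch, hedged: the inventory reported just above for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 repeats the same three resource classes, each with total, reserved and allocation_ratio; min_unit, max_unit and step_size only constrain individual allocations. To a first approximation, the capacity Placement schedules against is (total - reserved) * allocation_ratio. A quick Python check over the exact figures from the log:)

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Effective schedulable capacity per resource class:
        # (total - reserved) * allocation_ratio.
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}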
[ 598.658259] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.646s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 598.658732] env[68217]: DEBUG nova.compute.manager [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 598.666463] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.661s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.666672] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 598.668796] env[68217]: DEBUG oslo_concurrency.lockutils [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.543s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.669027] env[68217]: DEBUG oslo_concurrency.lockutils [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 598.670917] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.040s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 598.672337] env[68217]: INFO nova.compute.claims [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.720859] env[68217]: INFO nova.scheduler.client.report [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Deleted allocations for instance 
471e8a27-ed87-461a-b817-cd5ad208dd10 [ 598.725684] env[68217]: INFO nova.scheduler.client.report [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Deleted allocations for instance cdc84742-e20a-4e48-bfff-b3ac34405c1d [ 598.942277] env[68217]: DEBUG nova.compute.manager [req-f567fb5d-fae1-4d9d-8c3d-30278b284987 req-96acb3d2-9c48-4e1f-ae7a-19b7047771c3 service nova] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Received event network-vif-deleted-8a6a7f38-0ada-4d40-9405-e15fe9874407 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 599.171633] env[68217]: DEBUG nova.compute.utils [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 599.177269] env[68217]: DEBUG nova.compute.manager [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 599.177269] env[68217]: DEBUG nova.network.neutron [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 599.183770] env[68217]: DEBUG oslo_concurrency.lockutils [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 599.239807] env[68217]: DEBUG oslo_concurrency.lockutils [None req-118e92e4-692f-4a77-8b65-37a8a65e8a23 tempest-DeleteServersAdminTestJSON-1113498397 tempest-DeleteServersAdminTestJSON-1113498397-project-admin] Lock "471e8a27-ed87-461a-b817-cd5ad208dd10" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.155s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 599.244021] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c414d553-0470-4ab8-a36d-0330b23100bc tempest-ServerDiagnosticsTest-1228139958 tempest-ServerDiagnosticsTest-1228139958-project-member] Lock "cdc84742-e20a-4e48-bfff-b3ac34405c1d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.621s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 599.289663] env[68217]: DEBUG nova.policy [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a08655801191470cba0ff12bbb875225', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 
'aa6ecdf4d8874a98bd3db5fc64456622', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 599.680560] env[68217]: DEBUG nova.compute.manager [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 599.840012] env[68217]: DEBUG nova.network.neutron [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Successfully updated port: 8b1504f0-1a6e-4b49-8c7d-52cddf1b91e1 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 599.852706] env[68217]: DEBUG nova.network.neutron [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Successfully created port: 199674eb-b628-4b78-a622-1e10863e5716 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 600.257427] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bd02b2d-b438-4591-8f93-f9f630fb2f15 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.272972] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f73de45d-3f10-4dd8-84cb-b146739c0e80 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.318100] env[68217]: INFO nova.compute.manager [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Rebuilding instance [ 600.321734] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f1ea1bb-bbd6-4120-b35b-6cfe4938fa34 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.342537] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf8c731e-cc37-427e-8f0f-fb6768be8ec4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.350493] env[68217]: DEBUG oslo_concurrency.lockutils [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Acquiring lock "refresh_cache-bbd282ea-58aa-47b8-aa82-283a55ac1b29" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 600.350607] env[68217]: DEBUG oslo_concurrency.lockutils [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Acquired lock "refresh_cache-bbd282ea-58aa-47b8-aa82-283a55ac1b29" 
{{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 600.350762] env[68217]: DEBUG nova.network.neutron [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 600.352536] env[68217]: DEBUG nova.compute.manager [req-11a7f4ca-1065-4c9e-97c8-7e17c5c4cd6b req-750a3b8e-82d4-4727-91d1-39252ac6e462 service nova] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Received event network-vif-plugged-8b1504f0-1a6e-4b49-8c7d-52cddf1b91e1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 600.352724] env[68217]: DEBUG oslo_concurrency.lockutils [req-11a7f4ca-1065-4c9e-97c8-7e17c5c4cd6b req-750a3b8e-82d4-4727-91d1-39252ac6e462 service nova] Acquiring lock "bbd282ea-58aa-47b8-aa82-283a55ac1b29-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 600.352912] env[68217]: DEBUG oslo_concurrency.lockutils [req-11a7f4ca-1065-4c9e-97c8-7e17c5c4cd6b req-750a3b8e-82d4-4727-91d1-39252ac6e462 service nova] Lock "bbd282ea-58aa-47b8-aa82-283a55ac1b29-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 600.353089] env[68217]: DEBUG oslo_concurrency.lockutils [req-11a7f4ca-1065-4c9e-97c8-7e17c5c4cd6b req-750a3b8e-82d4-4727-91d1-39252ac6e462 service nova] Lock "bbd282ea-58aa-47b8-aa82-283a55ac1b29-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 600.353260] env[68217]: DEBUG nova.compute.manager [req-11a7f4ca-1065-4c9e-97c8-7e17c5c4cd6b req-750a3b8e-82d4-4727-91d1-39252ac6e462 service nova] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] No waiting events found dispatching network-vif-plugged-8b1504f0-1a6e-4b49-8c7d-52cddf1b91e1 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 600.353420] env[68217]: WARNING nova.compute.manager [req-11a7f4ca-1065-4c9e-97c8-7e17c5c4cd6b req-750a3b8e-82d4-4727-91d1-39252ac6e462 service nova] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Received unexpected event network-vif-plugged-8b1504f0-1a6e-4b49-8c7d-52cddf1b91e1 for instance with vm_state building and task_state spawning. 
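The DEBUG/WARNING records above show the pop-or-warn pattern behind Nova's external Neutron events: the handler takes the per-instance "-events" lock, tries to pop a registered waiter for network-vif-plugged-<port>, and logs the event as unexpected when nothing was waiting. The sketch below is a minimal, generic illustration of that pattern using only stdlib threading; `EventRegistry`, `prepare`, and `handle_external_event` are invented names for illustration and are not Nova's actual classes or functions.

```python
# Illustrative sketch (not Nova code) of the "pop waiting event or warn" pattern
# visible in the records above.
import threading
from collections import defaultdict


class EventRegistry:
    def __init__(self):
        self._lock = threading.Lock()        # plays the role of the "<uuid>-events" lock
        self._waiters = defaultdict(dict)    # instance_uuid -> {event_name: threading.Event}

    def prepare(self, instance_uuid, event_name):
        """Register interest in an event (e.g. before plugging a VIF) and return the waiter."""
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def pop_event(self, instance_uuid, event_name):
        """Remove and return the waiter for an incoming event, or None if nobody registered."""
        with self._lock:
            return self._waiters.get(instance_uuid, {}).pop(event_name, None)


def handle_external_event(registry, instance_uuid, event_name):
    waiter = registry.pop_event(instance_uuid, event_name)
    if waiter is None:
        # Mirrors the WARNING above: no waiting events found, so the event is "unexpected".
        print(f"Received unexpected event {event_name} for instance {instance_uuid}")
    else:
        waiter.set()                          # wake the thread blocked on waiter.wait(timeout)


# Usage: the spawning thread would call registry.prepare(...) and later waiter.wait(timeout);
# here no waiter was registered, so the handler reports the event as unexpected.
registry = EventRegistry()
handle_external_event(registry, "bbd282ea-58aa-47b8-aa82-283a55ac1b29",
                      "network-vif-plugged-8b1504f0-1a6e-4b49-8c7d-52cddf1b91e1")
```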
[ 600.369403] env[68217]: DEBUG nova.compute.provider_tree [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.392272] env[68217]: DEBUG nova.compute.manager [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 600.393152] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9204a70-4e74-4991-90dd-d136a035b427 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.699736] env[68217]: DEBUG nova.compute.manager [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 600.732974] env[68217]: DEBUG nova.virt.hardware [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 600.733149] env[68217]: DEBUG nova.virt.hardware [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 600.733926] env[68217]: DEBUG nova.virt.hardware [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 600.733926] env[68217]: DEBUG nova.virt.hardware [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 600.734070] env[68217]: DEBUG nova.virt.hardware [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 
tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 600.734257] env[68217]: DEBUG nova.virt.hardware [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 600.734493] env[68217]: DEBUG nova.virt.hardware [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 600.734664] env[68217]: DEBUG nova.virt.hardware [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 600.735804] env[68217]: DEBUG nova.virt.hardware [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 600.735804] env[68217]: DEBUG nova.virt.hardware [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 600.735804] env[68217]: DEBUG nova.virt.hardware [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 600.736306] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b8ef2f0-e395-4b0e-92d8-e44a6de5b4d3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.745888] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cb4b0cc-c602-4803-8338-1fd036217bc2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.829091] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Acquiring lock "6113feaf-5c21-49c3-9c19-ea10b60786d3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 600.829330] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f 
tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Lock "6113feaf-5c21-49c3-9c19-ea10b60786d3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 600.881578] env[68217]: DEBUG nova.scheduler.client.report [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 600.946022] env[68217]: DEBUG nova.network.neutron [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 601.382816] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.712s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 601.383413] env[68217]: DEBUG nova.compute.manager [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 601.387940] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.259s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 601.389569] env[68217]: INFO nova.compute.claims [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 601.410590] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 601.413666] env[68217]: DEBUG nova.network.neutron [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Updating instance_info_cache with network_info: [{"id": "8b1504f0-1a6e-4b49-8c7d-52cddf1b91e1", "address": "fa:16:3e:f3:46:9d", "network": {"id": "eb95ccfc-660f-4f27-ad54-441ed72afe53", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1612882946-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7f44d56bcd7463093ba85df1def7774", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f49a7d-c6e5-404f-b71a-91d8c070cd18", "external-id": "nsx-vlan-transportzone-120", "segmentation_id": 120, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b1504f0-1a", "ovs_interfaceid": "8b1504f0-1a6e-4b49-8c7d-52cddf1b91e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 601.416219] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9fbca5a-7a03-45f1-93a7-23ddc0d15bef {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.425974] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 601.425974] env[68217]: value = "task-2960639" [ 601.425974] env[68217]: _type = "Task" [ 601.425974] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.444444] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960639, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.752705] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Acquiring lock "366c780a-2870-4e6e-8cfe-7eec10c363d5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 601.752992] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Lock "366c780a-2870-4e6e-8cfe-7eec10c363d5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 601.893730] env[68217]: DEBUG nova.compute.utils [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 601.899204] env[68217]: DEBUG nova.compute.manager [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Not allocating networking since 'none' was specified. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 601.917040] env[68217]: DEBUG oslo_concurrency.lockutils [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Releasing lock "refresh_cache-bbd282ea-58aa-47b8-aa82-283a55ac1b29" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 601.917349] env[68217]: DEBUG nova.compute.manager [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Instance network_info: |[{"id": "8b1504f0-1a6e-4b49-8c7d-52cddf1b91e1", "address": "fa:16:3e:f3:46:9d", "network": {"id": "eb95ccfc-660f-4f27-ad54-441ed72afe53", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1612882946-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7f44d56bcd7463093ba85df1def7774", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f49a7d-c6e5-404f-b71a-91d8c070cd18", "external-id": "nsx-vlan-transportzone-120", "segmentation_id": 120, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b1504f0-1a", "ovs_interfaceid": "8b1504f0-1a6e-4b49-8c7d-52cddf1b91e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 601.917848] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f3:46:9d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f5f49a7d-c6e5-404f-b71a-91d8c070cd18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8b1504f0-1a6e-4b49-8c7d-52cddf1b91e1', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 601.931124] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Creating folder: Project (a7f44d56bcd7463093ba85df1def7774). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 601.931124] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-caaa7078-d855-40fa-9abd-b1f315c8e6ca {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.950030] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960639, 'name': PowerOffVM_Task, 'duration_secs': 0.19015} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.951580] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 601.954432] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 601.954432] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Created folder: Project (a7f44d56bcd7463093ba85df1def7774) in parent group-v594094. [ 601.954432] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Creating folder: Instances. Parent ref: group-v594127. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 601.954432] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-939fbf41-00be-46ee-aff1-626816f921c2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.956305] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cfb080c5-d1b0-446f-a59a-a6aabe4a232d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.968225] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 601.968500] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb18bf5d-11fa-4aac-bee0-7ca4b3a79441 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.973876] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Created folder: Instances in parent group-v594127. 
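Several records in this stretch poll vCenter tasks to completion (PowerOffVM_Task, DeleteDatastoreFile_Task, CreateVM_Task, CopyVirtualDisk_Task), logging "progress is N%" until the task reports success. The snippet below is a generic sketch of that poll-until-done loop; `fetch_task_info` is a hypothetical callable standing in for the vCenter task-info lookup, and this is not oslo.vmware's implementation or API.

```python
# Generic sketch of the poll-until-done loop behind records like
# "Task: {'id': task-2960639, 'name': PowerOffVM_Task} progress is 0%."
import time


def wait_for_task(fetch_task_info, task_id, poll_interval=0.5, timeout=300):
    """Poll a task until it reaches a terminal state, reporting progress along the way."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = fetch_task_info(task_id)       # e.g. {'state': 'running', 'progress': 40}
        state = info['state']
        if state == 'success':
            print(f"Task: {task_id} completed successfully.")
            return info.get('result')
        if state == 'error':
            raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
        print(f"Task: {task_id} progress is {info.get('progress', 0)}%.")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
```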
[ 601.974132] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 601.974321] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 601.974529] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-44426516-7cbb-4523-8d32-6b0bf825a7dc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.004403] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 602.004789] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 602.006497] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Deleting the datastore file [datastore1] ae5fa3f4-e487-40ed-9ca4-12a6f9713eba {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 602.008902] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-00a93357-1990-4512-bfd1-b3b40ba16c6b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.010026] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 602.010026] env[68217]: value = "task-2960643" [ 602.010026] env[68217]: _type = "Task" [ 602.010026] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.017379] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 602.017379] env[68217]: value = "task-2960644" [ 602.017379] env[68217]: _type = "Task" [ 602.017379] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.028382] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960643, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.038697] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960644, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.172367] env[68217]: DEBUG nova.network.neutron [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Successfully updated port: 199674eb-b628-4b78-a622-1e10863e5716 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 602.400753] env[68217]: DEBUG nova.compute.manager [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 602.533190] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960643, 'name': CreateVM_Task, 'duration_secs': 0.449798} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.534294] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 602.535416] env[68217]: DEBUG oslo_concurrency.lockutils [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.536095] env[68217]: DEBUG oslo_concurrency.lockutils [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 602.536732] env[68217]: DEBUG oslo_concurrency.lockutils [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 602.546023] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe45ae06-d5d4-43b8-a345-e1679aa81493 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.546023] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960644, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.116884} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.547725] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 602.548114] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 602.548597] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 602.559543] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Waiting for the task: (returnval){ [ 602.559543] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524e4171-1cc4-a2fa-9e9a-85b6602c67c5" [ 602.559543] env[68217]: _type = "Task" [ 602.559543] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.574801] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524e4171-1cc4-a2fa-9e9a-85b6602c67c5, 'name': SearchDatastore_Task, 'duration_secs': 0.013312} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.578127] env[68217]: DEBUG oslo_concurrency.lockutils [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 602.578127] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 602.579094] env[68217]: DEBUG oslo_concurrency.lockutils [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.579094] env[68217]: DEBUG oslo_concurrency.lockutils [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 602.579094] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 602.579094] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-01bc1b57-a525-4bfa-adc8-867d118e542d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.590250] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 602.591205] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 602.592455] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-303cf5b1-cf17-4ca7-9de8-36c9f4d1374b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.601782] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Waiting for the task: (returnval){ [ 602.601782] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527b825c-c441-61ae-c8b9-074551a3ac53" [ 602.601782] env[68217]: _type = "Task" [ 602.601782] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.623603] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527b825c-c441-61ae-c8b9-074551a3ac53, 'name': SearchDatastore_Task, 'duration_secs': 0.010959} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.624427] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-894efaba-adca-4d06-b374-2b6305d048ed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.633475] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Waiting for the task: (returnval){ [ 602.633475] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]521f501f-560e-4a05-01d3-b967623355f8" [ 602.633475] env[68217]: _type = "Task" [ 602.633475] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.643485] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521f501f-560e-4a05-01d3-b967623355f8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.674447] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Acquiring lock "refresh_cache-af11d05f-4432-4505-bb52-226414488960" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.674792] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Acquired lock "refresh_cache-af11d05f-4432-4505-bb52-226414488960" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 602.675352] env[68217]: DEBUG nova.network.neutron [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 602.767917] env[68217]: DEBUG nova.compute.manager [req-8a8611b6-a746-4e2b-93ee-f038bb233e80 req-0ea2c1a1-effa-42cd-9736-531a340274ab service nova] [instance: af11d05f-4432-4505-bb52-226414488960] Received event network-vif-plugged-199674eb-b628-4b78-a622-1e10863e5716 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 602.768380] env[68217]: DEBUG oslo_concurrency.lockutils [req-8a8611b6-a746-4e2b-93ee-f038bb233e80 req-0ea2c1a1-effa-42cd-9736-531a340274ab service nova] Acquiring lock "af11d05f-4432-4505-bb52-226414488960-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 602.768608] env[68217]: DEBUG oslo_concurrency.lockutils [req-8a8611b6-a746-4e2b-93ee-f038bb233e80 req-0ea2c1a1-effa-42cd-9736-531a340274ab service nova] Lock "af11d05f-4432-4505-bb52-226414488960-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 602.768872] env[68217]: DEBUG oslo_concurrency.lockutils [req-8a8611b6-a746-4e2b-93ee-f038bb233e80 req-0ea2c1a1-effa-42cd-9736-531a340274ab service nova] Lock "af11d05f-4432-4505-bb52-226414488960-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 602.769030] env[68217]: DEBUG nova.compute.manager [req-8a8611b6-a746-4e2b-93ee-f038bb233e80 req-0ea2c1a1-effa-42cd-9736-531a340274ab service nova] [instance: af11d05f-4432-4505-bb52-226414488960] No waiting events found dispatching network-vif-plugged-199674eb-b628-4b78-a622-1e10863e5716 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 602.769928] env[68217]: WARNING nova.compute.manager [req-8a8611b6-a746-4e2b-93ee-f038bb233e80 req-0ea2c1a1-effa-42cd-9736-531a340274ab service nova] [instance: af11d05f-4432-4505-bb52-226414488960] Received unexpected event network-vif-plugged-199674eb-b628-4b78-a622-1e10863e5716 for instance with vm_state building and task_state 
spawning. [ 602.953604] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2726ffe8-9a88-4b82-81e6-5d0de5f6907c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.960200] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0afe73d7-25f3-47bd-8a21-690b05595008 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.003201] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9f97b3-9973-4464-82bb-6cbaa6af8f18 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.009057] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquiring lock "cf457d43-b939-4284-b84d-9075895e9dda" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 603.009306] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Lock "cf457d43-b939-4284-b84d-9075895e9dda" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 603.015272] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d14b543b-2241-42aa-87f1-460e779a3a3a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.035605] env[68217]: DEBUG nova.compute.provider_tree [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 603.147014] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521f501f-560e-4a05-01d3-b967623355f8, 'name': SearchDatastore_Task, 'duration_secs': 0.01146} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 603.150995] env[68217]: DEBUG oslo_concurrency.lockutils [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 603.151315] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] bbd282ea-58aa-47b8-aa82-283a55ac1b29/bbd282ea-58aa-47b8-aa82-283a55ac1b29.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 603.151599] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cf61d155-2054-4e8f-bbe1-3ef1abd07bd5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.166061] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Waiting for the task: (returnval){ [ 603.166061] env[68217]: value = "task-2960645" [ 603.166061] env[68217]: _type = "Task" [ 603.166061] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.185479] env[68217]: DEBUG nova.compute.manager [req-8a089d16-dcb7-4c04-9f33-a4198c8aa605 req-375215fb-1da2-441a-853e-0746a7a1f3ed service nova] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Received event network-changed-8b1504f0-1a6e-4b49-8c7d-52cddf1b91e1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 603.185670] env[68217]: DEBUG nova.compute.manager [req-8a089d16-dcb7-4c04-9f33-a4198c8aa605 req-375215fb-1da2-441a-853e-0746a7a1f3ed service nova] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Refreshing instance network info cache due to event network-changed-8b1504f0-1a6e-4b49-8c7d-52cddf1b91e1. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 603.185905] env[68217]: DEBUG oslo_concurrency.lockutils [req-8a089d16-dcb7-4c04-9f33-a4198c8aa605 req-375215fb-1da2-441a-853e-0746a7a1f3ed service nova] Acquiring lock "refresh_cache-bbd282ea-58aa-47b8-aa82-283a55ac1b29" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.186049] env[68217]: DEBUG oslo_concurrency.lockutils [req-8a089d16-dcb7-4c04-9f33-a4198c8aa605 req-375215fb-1da2-441a-853e-0746a7a1f3ed service nova] Acquired lock "refresh_cache-bbd282ea-58aa-47b8-aa82-283a55ac1b29" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 603.186203] env[68217]: DEBUG nova.network.neutron [req-8a089d16-dcb7-4c04-9f33-a4198c8aa605 req-375215fb-1da2-441a-853e-0746a7a1f3ed service nova] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Refreshing network info cache for port 8b1504f0-1a6e-4b49-8c7d-52cddf1b91e1 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 603.195496] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960645, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.419437] env[68217]: DEBUG nova.compute.manager [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 603.451984] env[68217]: DEBUG nova.virt.hardware [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 603.452256] env[68217]: DEBUG nova.virt.hardware [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 603.452413] env[68217]: DEBUG nova.virt.hardware [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 603.453142] env[68217]: DEBUG 
nova.virt.hardware [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 603.453142] env[68217]: DEBUG nova.virt.hardware [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 603.453142] env[68217]: DEBUG nova.virt.hardware [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 603.453871] env[68217]: DEBUG nova.virt.hardware [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 603.453871] env[68217]: DEBUG nova.virt.hardware [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 603.454323] env[68217]: DEBUG nova.virt.hardware [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 603.454565] env[68217]: DEBUG nova.virt.hardware [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 603.455171] env[68217]: DEBUG nova.virt.hardware [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 603.457137] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14078220-391e-4138-bcdc-07169de8a5a9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.473952] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbcef6d6-bb73-4df2-863b-8eda75fa91b4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.499297] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Instance VIF info [] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 
603.507553] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 603.507698] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 603.507866] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5e3abe56-bc77-4c05-954c-031f807f5240 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.540026] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 603.540026] env[68217]: value = "task-2960646" [ 603.540026] env[68217]: _type = "Task" [ 603.540026] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.540026] env[68217]: DEBUG nova.network.neutron [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 603.542216] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 603.564063] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960646, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.592597] env[68217]: DEBUG nova.virt.hardware [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 603.592952] env[68217]: DEBUG nova.virt.hardware [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 603.593415] env[68217]: DEBUG nova.virt.hardware [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 603.593870] env[68217]: DEBUG nova.virt.hardware [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 603.593870] env[68217]: DEBUG nova.virt.hardware [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 603.594016] env[68217]: DEBUG nova.virt.hardware [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 603.594226] env[68217]: DEBUG nova.virt.hardware [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 603.594337] env[68217]: DEBUG nova.virt.hardware [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 603.594517] env[68217]: 
DEBUG nova.virt.hardware [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 603.594672] env[68217]: DEBUG nova.virt.hardware [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 603.595484] env[68217]: DEBUG nova.virt.hardware [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 603.596080] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45107a3-ffde-4b2c-bec5-13e56581eda6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.618505] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930f8918-ab95-4615-b308-adacb123cc0a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.641665] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Instance VIF info [] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 603.646526] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 603.646861] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 603.647117] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7a533581-4ea1-466b-9eeb-8efde8327223 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.671867] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 603.671867] env[68217]: value = "task-2960647" [ 603.671867] env[68217]: _type = "Task" [ 603.671867] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 603.679592] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960645, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 603.687822] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960647, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.051498] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.664s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 604.052571] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 604.058645] env[68217]: DEBUG oslo_concurrency.lockutils [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.257s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 604.059231] env[68217]: DEBUG nova.objects.instance [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Lazy-loading 'resources' on Instance uuid 71dd4921-5859-421f-9e31-e9800adc9e3c {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 604.060029] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960646, 'name': CreateVM_Task, 'duration_secs': 0.467078} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.060385] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 604.060785] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.060935] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.061259] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 604.061728] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e40f598-a2ca-4a6f-9246-3dbf86fe2453 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.068260] env[68217]: DEBUG nova.network.neutron [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Updating instance_info_cache with network_info: [{"id": "199674eb-b628-4b78-a622-1e10863e5716", "address": "fa:16:3e:c9:de:e6", "network": {"id": "a4ff5577-8c9a-49e8-89d7-e86362ebdaa3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-93198499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa6ecdf4d8874a98bd3db5fc64456622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap199674eb-b6", "ovs_interfaceid": "199674eb-b628-4b78-a622-1e10863e5716", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.070365] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 
tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 604.070365] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5255edfd-d760-9c7a-5ff0-29cc8f9ae579" [ 604.070365] env[68217]: _type = "Task" [ 604.070365] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.089413] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5255edfd-d760-9c7a-5ff0-29cc8f9ae579, 'name': SearchDatastore_Task, 'duration_secs': 0.010586} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.089413] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.089413] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 604.089413] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.089557] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 604.089557] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 604.089557] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e585ccdd-e7f5-4463-90c9-ccc7814e1b26 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.099565] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 604.099700] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None 
req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 604.100689] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4c4e4e1-9ce2-463a-b543-8e7b2c1d87e9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.108663] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 604.108663] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]520349ba-e6ba-6b10-8a19-a21da67ec321" [ 604.108663] env[68217]: _type = "Task" [ 604.108663] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.119339] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]520349ba-e6ba-6b10-8a19-a21da67ec321, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.123129] env[68217]: DEBUG nova.network.neutron [req-8a089d16-dcb7-4c04-9f33-a4198c8aa605 req-375215fb-1da2-441a-853e-0746a7a1f3ed service nova] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Updated VIF entry in instance network info cache for port 8b1504f0-1a6e-4b49-8c7d-52cddf1b91e1. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 604.123129] env[68217]: DEBUG nova.network.neutron [req-8a089d16-dcb7-4c04-9f33-a4198c8aa605 req-375215fb-1da2-441a-853e-0746a7a1f3ed service nova] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Updating instance_info_cache with network_info: [{"id": "8b1504f0-1a6e-4b49-8c7d-52cddf1b91e1", "address": "fa:16:3e:f3:46:9d", "network": {"id": "eb95ccfc-660f-4f27-ad54-441ed72afe53", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1612882946-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a7f44d56bcd7463093ba85df1def7774", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f5f49a7d-c6e5-404f-b71a-91d8c070cd18", "external-id": "nsx-vlan-transportzone-120", "segmentation_id": 120, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8b1504f0-1a", "ovs_interfaceid": "8b1504f0-1a6e-4b49-8c7d-52cddf1b91e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.184624] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960647, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.190094] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960645, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.53818} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.190094] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] bbd282ea-58aa-47b8-aa82-283a55ac1b29/bbd282ea-58aa-47b8-aa82-283a55ac1b29.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 604.190094] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 604.190094] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-feb3852a-6e44-4c15-81b3-9d28068c8a88 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.200165] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Waiting for the task: (returnval){ [ 604.200165] env[68217]: value = "task-2960648" [ 604.200165] env[68217]: _type = "Task" [ 604.200165] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.208649] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960648, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.562405] env[68217]: DEBUG nova.compute.utils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 604.568805] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 604.569064] env[68217]: DEBUG nova.network.neutron [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 604.572252] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Releasing lock "refresh_cache-af11d05f-4432-4505-bb52-226414488960" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.572252] env[68217]: DEBUG nova.compute.manager [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Instance network_info: |[{"id": "199674eb-b628-4b78-a622-1e10863e5716", "address": "fa:16:3e:c9:de:e6", "network": {"id": "a4ff5577-8c9a-49e8-89d7-e86362ebdaa3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-93198499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa6ecdf4d8874a98bd3db5fc64456622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap199674eb-b6", "ovs_interfaceid": "199674eb-b628-4b78-a622-1e10863e5716", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 604.573615] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:de:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa8c2f93-f287-41b3-adb6-4942a7ea2a0b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '199674eb-b628-4b78-a622-1e10863e5716', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 604.585609] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Creating folder: Project (aa6ecdf4d8874a98bd3db5fc64456622). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 604.586835] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e86294de-1e39-4fad-b601-5e8e2c7d3e4d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.605927] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Created folder: Project (aa6ecdf4d8874a98bd3db5fc64456622) in parent group-v594094. [ 604.605927] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Creating folder: Instances. Parent ref: group-v594132. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 604.605927] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-21a913e0-d2db-4545-a835-7e8cfb0096a1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.623485] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Created folder: Instances in parent group-v594132. [ 604.623732] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 604.623948] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af11d05f-4432-4505-bb52-226414488960] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 604.624414] env[68217]: DEBUG oslo_concurrency.lockutils [req-8a089d16-dcb7-4c04-9f33-a4198c8aa605 req-375215fb-1da2-441a-853e-0746a7a1f3ed service nova] Releasing lock "refresh_cache-bbd282ea-58aa-47b8-aa82-283a55ac1b29" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.625340] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a170d63c-ee36-49bb-9905-8fec0059cd4e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.654039] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]520349ba-e6ba-6b10-8a19-a21da67ec321, 'name': SearchDatastore_Task, 'duration_secs': 0.010292} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.654868] env[68217]: DEBUG nova.policy [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '348bde8560e04eee89f39a3a842e173b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '734fb9662f984c239bc1648eb0033ff4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 604.658579] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea8d16c6-2d1b-46d6-8d11-a527eada0d89 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.664035] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 604.664035] env[68217]: value = "task-2960651" [ 604.664035] env[68217]: _type = "Task" [ 604.664035] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.668782] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 604.668782] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d2b6e5-babd-2f02-b2c8-557d589dcd8b" [ 604.668782] env[68217]: _type = "Task" [ 604.668782] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.676225] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960651, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.686104] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d2b6e5-babd-2f02-b2c8-557d589dcd8b, 'name': SearchDatastore_Task, 'duration_secs': 0.010531} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 604.686756] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 604.687057] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 93b49e91-5e9a-4b11-a833-31ab0883e0e8/93b49e91-5e9a-4b11-a833-31ab0883e0e8.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 604.687360] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-abee2c27-9003-40f9-a77d-b9ca3d874205 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.694168] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960647, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.697426] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 604.697426] env[68217]: value = "task-2960652" [ 604.697426] env[68217]: _type = "Task" [ 604.697426] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 604.710941] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960652, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 604.718263] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960648, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.072526] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 605.107296] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a9ce2b1-57b8-4b3d-969d-2cb58cdf1819 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.118764] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef70b30-ca90-4e08-b082-a45b4cd5a67b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.159259] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f80ea31-80f6-45f2-914e-219ad719e0bd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.174034] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318a1fdb-8d7e-4cb1-8a95-778da0a07779 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.182185] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960651, 'name': CreateVM_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.183392] env[68217]: DEBUG nova.network.neutron [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Successfully created port: 800cf755-2034-482c-a604-63fbfe457f26 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 605.197195] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960647, 'name': CreateVM_Task, 'duration_secs': 1.431701} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.204768] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 605.205446] env[68217]: DEBUG nova.compute.provider_tree [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 605.207684] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.207782] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 605.208229] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 605.214417] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7139334f-323e-4d2f-bd86-7a5acb303580 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.220941] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 605.220941] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527458bc-8b02-3195-6f44-3b295c732cdd" [ 605.220941] env[68217]: _type = "Task" [ 605.220941] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.224306] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960648, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.905956} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.231065] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 605.231378] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960652, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.232480] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1a9d005-f796-45b4-bfc5-086bce847b18 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.243593] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527458bc-8b02-3195-6f44-3b295c732cdd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.263907] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Reconfiguring VM instance instance-0000000d to attach disk [datastore1] bbd282ea-58aa-47b8-aa82-283a55ac1b29/bbd282ea-58aa-47b8-aa82-283a55ac1b29.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 605.265394] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-49c57e90-fa57-4387-9662-e6185e3a209c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.289366] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Waiting for the task: (returnval){ [ 605.289366] env[68217]: value = "task-2960653" [ 605.289366] env[68217]: _type = "Task" [ 605.289366] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.299751] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960653, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.678769] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960651, 'name': CreateVM_Task, 'duration_secs': 0.918382} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.678769] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: af11d05f-4432-4505-bb52-226414488960] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 605.681560] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.714057] env[68217]: DEBUG nova.scheduler.client.report [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 605.728871] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960652, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.792951} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.733821] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 93b49e91-5e9a-4b11-a833-31ab0883e0e8/93b49e91-5e9a-4b11-a833-31ab0883e0e8.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 605.734051] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 605.734384] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a321271c-0a73-4c6c-bc06-8d5b6ab47130 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.745991] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527458bc-8b02-3195-6f44-3b295c732cdd, 'name': SearchDatastore_Task, 'duration_secs': 0.06264} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.748807] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 605.748807] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 605.748807] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.748807] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 605.749080] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 605.749080] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 605.749080] env[68217]: value = "task-2960654" [ 605.749080] env[68217]: _type = "Task" [ 605.749080] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.749080] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 605.749522] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 605.749639] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a16ae93-71cb-4a37-a0a2-0b6e845e3e9c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.751912] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-501c8d0b-f1a4-427c-b834-3b06fc1a3383 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.765810] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Waiting for the task: (returnval){ [ 605.765810] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]528ff213-e9de-8cb3-4da5-a61794427734" [ 605.765810] env[68217]: _type = "Task" [ 605.765810] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.766532] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960654, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.767705] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 605.767891] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 605.773457] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a6b9348-a789-449b-94c8-702059328164 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.782108] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528ff213-e9de-8cb3-4da5-a61794427734, 'name': SearchDatastore_Task, 'duration_secs': 0.010783} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.783519] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 605.783780] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 605.784013] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.784533] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 605.784533] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5202ba7f-35ff-9c9c-32d1-ecf495fce4d2" [ 605.784533] env[68217]: _type = "Task" [ 605.784533] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.799312] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5202ba7f-35ff-9c9c-32d1-ecf495fce4d2, 'name': SearchDatastore_Task, 'duration_secs': 0.013154} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.802599] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0f991eb3-75c2-4ec9-baab-7121b9a66ff2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.812404] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960653, 'name': ReconfigVM_Task, 'duration_secs': 0.33543} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.814392] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Reconfigured VM instance instance-0000000d to attach disk [datastore1] bbd282ea-58aa-47b8-aa82-283a55ac1b29/bbd282ea-58aa-47b8-aa82-283a55ac1b29.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 605.815531] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 605.815531] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a56c60-7c24-ceea-d16b-cfc495232c5b" [ 605.815531] env[68217]: _type = "Task" [ 605.815531] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.815531] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-713b51c5-f380-498d-b616-47c0a43d5de0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.827815] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Waiting for the task: (returnval){ [ 605.827815] env[68217]: value = "task-2960655" [ 605.827815] env[68217]: _type = "Task" [ 605.827815] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.831499] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a56c60-7c24-ceea-d16b-cfc495232c5b, 'name': SearchDatastore_Task, 'duration_secs': 0.009932} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.835338] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 605.835338] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] ae5fa3f4-e487-40ed-9ca4-12a6f9713eba/ae5fa3f4-e487-40ed-9ca4-12a6f9713eba.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 605.835338] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 605.835589] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 605.835767] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-370a04c1-8f3f-4ef2-bd37-42d4bb9f2244 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.839903] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a407dd77-7a9a-41cb-84fe-8d447ed8fa5b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.848098] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960655, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.855982] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 605.855982] env[68217]: value = "task-2960656" [ 605.855982] env[68217]: _type = "Task" [ 605.855982] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.857847] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 605.860821] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 605.866427] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-657a9c14-671f-47f1-8e52-9d540be05457 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.873988] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Waiting for the task: (returnval){ [ 605.873988] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522cd96a-0d8a-22af-87db-4518ad5ed6de" [ 605.873988] env[68217]: _type = "Task" [ 605.873988] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.876383] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960656, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.889909] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522cd96a-0d8a-22af-87db-4518ad5ed6de, 'name': SearchDatastore_Task, 'duration_secs': 0.010102} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.890773] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1dc849ee-945b-43ea-9b4b-924212f9ab07 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.897937] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Waiting for the task: (returnval){ [ 605.897937] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d55989-8634-c712-ab39-a08c11031230" [ 605.897937] env[68217]: _type = "Task" [ 605.897937] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.912242] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d55989-8634-c712-ab39-a08c11031230, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.041471] env[68217]: DEBUG nova.compute.manager [req-77fb6530-2280-4eff-9200-62cb800c3bdd req-49813b71-573d-4aea-ba31-5ea712bfe704 service nova] [instance: af11d05f-4432-4505-bb52-226414488960] Received event network-changed-199674eb-b628-4b78-a622-1e10863e5716 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 606.041471] env[68217]: DEBUG nova.compute.manager [req-77fb6530-2280-4eff-9200-62cb800c3bdd req-49813b71-573d-4aea-ba31-5ea712bfe704 service nova] [instance: af11d05f-4432-4505-bb52-226414488960] Refreshing instance network info cache due to event network-changed-199674eb-b628-4b78-a622-1e10863e5716. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 606.041471] env[68217]: DEBUG oslo_concurrency.lockutils [req-77fb6530-2280-4eff-9200-62cb800c3bdd req-49813b71-573d-4aea-ba31-5ea712bfe704 service nova] Acquiring lock "refresh_cache-af11d05f-4432-4505-bb52-226414488960" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 606.041471] env[68217]: DEBUG oslo_concurrency.lockutils [req-77fb6530-2280-4eff-9200-62cb800c3bdd req-49813b71-573d-4aea-ba31-5ea712bfe704 service nova] Acquired lock "refresh_cache-af11d05f-4432-4505-bb52-226414488960" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 606.041471] env[68217]: DEBUG nova.network.neutron [req-77fb6530-2280-4eff-9200-62cb800c3bdd req-49813b71-573d-4aea-ba31-5ea712bfe704 service nova] [instance: af11d05f-4432-4505-bb52-226414488960] Refreshing network info cache for port 199674eb-b628-4b78-a622-1e10863e5716 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 606.083970] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 606.126534] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 606.126719] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 606.126812] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 606.130315] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 606.130315] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 606.130315] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 606.130315] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 606.130315] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 606.132383] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 606.132383] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 606.132383] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 606.132383] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-634650ce-1f68-4b5d-a8ff-e88f1dafdf1f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.145343] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260ffa45-7625-4a0a-88c4-e8da01ae955c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.194335] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "1675982e-0702-482b-9fe6-fd4eb9d83311" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 606.194335] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "1675982e-0702-482b-9fe6-fd4eb9d83311" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.222596] env[68217]: DEBUG oslo_concurrency.lockutils [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.164s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 606.226334] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.403s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 606.227170] env[68217]: INFO nova.compute.claims [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 
tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 606.261948] env[68217]: INFO nova.scheduler.client.report [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Deleted allocations for instance 71dd4921-5859-421f-9e31-e9800adc9e3c [ 606.279149] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960654, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.170341} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.279149] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 606.279149] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a52baf6a-58d9-4cde-ab20-98f49a56a9d8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.307693] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 93b49e91-5e9a-4b11-a833-31ab0883e0e8/93b49e91-5e9a-4b11-a833-31ab0883e0e8.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 606.308952] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6e44538-91ec-49f9-b1a2-0085bb051163 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.338020] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 606.338020] env[68217]: value = "task-2960657" [ 606.338020] env[68217]: _type = "Task" [ 606.338020] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.346216] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960655, 'name': Rename_Task, 'duration_secs': 0.180345} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.346460] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 606.346699] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a99c7d2-ec31-4a64-b8a1-5c766ecf33a4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.356330] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960657, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.357986] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Waiting for the task: (returnval){ [ 606.357986] env[68217]: value = "task-2960658" [ 606.357986] env[68217]: _type = "Task" [ 606.357986] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.374510] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960658, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.375151] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960656, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.423222] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d55989-8634-c712-ab39-a08c11031230, 'name': SearchDatastore_Task, 'duration_secs': 0.011034} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.423532] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 606.423765] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] af11d05f-4432-4505-bb52-226414488960/af11d05f-4432-4505-bb52-226414488960.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 606.424043] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-684dfe85-0638-42f5-ae57-55fc31627556 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.433357] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Waiting for the task: (returnval){ [ 606.433357] env[68217]: value = "task-2960659" [ 606.433357] env[68217]: _type = "Task" [ 606.433357] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.443975] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': task-2960659, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.778488] env[68217]: DEBUG oslo_concurrency.lockutils [None req-22c8acc1-d7b8-48f9-8a98-1aafc48ff6fd tempest-TenantUsagesTestJSON-404402051 tempest-TenantUsagesTestJSON-404402051-project-member] Lock "71dd4921-5859-421f-9e31-e9800adc9e3c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.783s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 606.855251] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960657, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.880880] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960656, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536629} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.884305] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] ae5fa3f4-e487-40ed-9ca4-12a6f9713eba/ae5fa3f4-e487-40ed-9ca4-12a6f9713eba.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 606.884540] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 606.884810] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960658, 'name': PowerOnVM_Task} progress is 1%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.885127] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c0179ee-0478-438e-8285-0e3d17efc185 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.895819] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 606.895819] env[68217]: value = "task-2960660" [ 606.895819] env[68217]: _type = "Task" [ 606.895819] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 606.908491] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960660, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.947158] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': task-2960659, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.293199] env[68217]: DEBUG nova.network.neutron [req-77fb6530-2280-4eff-9200-62cb800c3bdd req-49813b71-573d-4aea-ba31-5ea712bfe704 service nova] [instance: af11d05f-4432-4505-bb52-226414488960] Updated VIF entry in instance network info cache for port 199674eb-b628-4b78-a622-1e10863e5716. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 607.293199] env[68217]: DEBUG nova.network.neutron [req-77fb6530-2280-4eff-9200-62cb800c3bdd req-49813b71-573d-4aea-ba31-5ea712bfe704 service nova] [instance: af11d05f-4432-4505-bb52-226414488960] Updating instance_info_cache with network_info: [{"id": "199674eb-b628-4b78-a622-1e10863e5716", "address": "fa:16:3e:c9:de:e6", "network": {"id": "a4ff5577-8c9a-49e8-89d7-e86362ebdaa3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-93198499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa6ecdf4d8874a98bd3db5fc64456622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap199674eb-b6", "ovs_interfaceid": "199674eb-b628-4b78-a622-1e10863e5716", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.358285] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960657, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.383610] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960658, 'name': PowerOnVM_Task} progress is 1%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.427022] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960660, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.411571} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.427022] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 607.427022] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77e4da53-24b7-4634-af49-b6e5d8244502 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.466025] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Reconfiguring VM instance instance-0000000b to attach disk [datastore1] ae5fa3f4-e487-40ed-9ca4-12a6f9713eba/ae5fa3f4-e487-40ed-9ca4-12a6f9713eba.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 607.471990] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60bd77ad-9ebd-4b84-b735-79dc15e615f9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.493869] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': task-2960659, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.016375} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.494686] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] af11d05f-4432-4505-bb52-226414488960/af11d05f-4432-4505-bb52-226414488960.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 607.494899] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 607.495599] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9bb8d7a7-19a0-4388-97ef-d367c09709ea {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.504365] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 607.504365] env[68217]: value = "task-2960661" [ 607.504365] env[68217]: _type = "Task" [ 607.504365] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.505892] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Waiting for the task: (returnval){ [ 607.505892] env[68217]: value = "task-2960662" [ 607.505892] env[68217]: _type = "Task" [ 607.505892] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.525951] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960661, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.526282] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': task-2960662, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.638271] env[68217]: DEBUG nova.network.neutron [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Successfully updated port: 800cf755-2034-482c-a604-63fbfe457f26 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 607.797443] env[68217]: DEBUG oslo_concurrency.lockutils [req-77fb6530-2280-4eff-9200-62cb800c3bdd req-49813b71-573d-4aea-ba31-5ea712bfe704 service nova] Releasing lock "refresh_cache-af11d05f-4432-4505-bb52-226414488960" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 607.859043] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960657, 'name': ReconfigVM_Task, 'duration_secs': 1.120047} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 607.859349] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 93b49e91-5e9a-4b11-a833-31ab0883e0e8/93b49e91-5e9a-4b11-a833-31ab0883e0e8.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 607.860050] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5d085d16-2d55-43c1-8560-47848a6f6cb3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.879705] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 607.879705] env[68217]: value = "task-2960663" [ 607.879705] env[68217]: _type = "Task" [ 607.879705] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.889692] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960658, 'name': PowerOnVM_Task} progress is 1%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.893859] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb917ef3-1d9c-4a4a-b550-e9cec73c1b75 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.899412] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960663, 'name': Rename_Task} progress is 10%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.907461] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925aa8a4-75d8-4aa1-a0b6-4e1e0ff85783 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.955173] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-140d5121-2c82-4234-b42f-363e42052ab8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.962449] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea8e2a70-b546-4490-badb-9900c568ee5e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.981621] env[68217]: DEBUG nova.compute.provider_tree [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 608.017651] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960661, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.026107] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': task-2960662, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.142467] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "refresh_cache-aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.142467] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquired lock "refresh_cache-aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 608.142467] env[68217]: DEBUG nova.network.neutron [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 608.389087] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960658, 'name': PowerOnVM_Task} progress is 64%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.401352] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960663, 'name': Rename_Task, 'duration_secs': 0.311438} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.401566] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 608.401898] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7ba5d656-2cc0-4213-b2b1-34c485332d25 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.429325] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 608.429325] env[68217]: value = "task-2960664" [ 608.429325] env[68217]: _type = "Task" [ 608.429325] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.440601] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960664, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.512855] env[68217]: ERROR nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [req-490e6035-985e-4b53-9ef7-b0261a701b9d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-490e6035-985e-4b53-9ef7-b0261a701b9d"}]} [ 608.525444] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': task-2960662, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.890286} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.529641] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 608.529776] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960661, 'name': ReconfigVM_Task, 'duration_secs': 0.675168} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.530877] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3edc9e-234e-4f51-a9be-c0606d69deaa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.535329] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Reconfigured VM instance instance-0000000b to attach disk [datastore1] ae5fa3f4-e487-40ed-9ca4-12a6f9713eba/ae5fa3f4-e487-40ed-9ca4-12a6f9713eba.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 608.535475] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f156b6c2-7666-47c4-94ed-1641794a482d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.563699] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] af11d05f-4432-4505-bb52-226414488960/af11d05f-4432-4505-bb52-226414488960.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 608.567119] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 608.572717] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f4f3e981-8d00-47b4-8827-64ba7c45dd2b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.593085] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 608.593085] env[68217]: value = "task-2960665" [ 608.593085] env[68217]: _type = "Task" [ 608.593085] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.605429] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Waiting for the task: (returnval){ [ 608.605429] env[68217]: value = "task-2960666" [ 608.605429] env[68217]: _type = "Task" [ 608.605429] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.605926] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960665, 'name': Rename_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.616823] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': task-2960666, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.618155] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 608.618394] env[68217]: DEBUG nova.compute.provider_tree [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 608.644437] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 608.678953] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 608.722726] env[68217]: DEBUG nova.network.neutron [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 608.832767] env[68217]: DEBUG oslo_concurrency.lockutils [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Acquiring lock "a4a88f10-937a-4fa6-aa15-eb7f669e77d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.833704] env[68217]: DEBUG oslo_concurrency.lockutils [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Lock "a4a88f10-937a-4fa6-aa15-eb7f669e77d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.888626] env[68217]: DEBUG oslo_vmware.api [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960658, 'name': PowerOnVM_Task, 'duration_secs': 2.153603} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.891502] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 608.891732] env[68217]: INFO nova.compute.manager [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Took 10.84 seconds to spawn the instance on the hypervisor. 
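The records above trace the VMware spawn path for several instances end to end: the image is copied out of devstack-image-cache_base with CopyVirtualDisk_Task, the root disk is grown with ExtendVirtualDisk_Task, ReconfigVM_Task attaches the .vmdk, Rename_Task renames the VM, and PowerOnVM_Task boots it, while oslo_vmware's wait_for_task/_poll_task loop logs "progress is N%" until "completed successfully". The sketch below is only a minimal, self-contained illustration of that polling pattern under assumed names: VMwareTask, its poll() method, and the interval/steps parameters are hypothetical stand-ins, not the real oslo.vmware or vSphere API.

```python
# Minimal sketch of the task-polling pattern visible in the log above.
# Everything here is a hypothetical stand-in; the real driver polls vCenter
# task objects through oslo_vmware rather than a local fake like this one.
import time


class VMwareTask:
    """Hypothetical stand-in for a vCenter task handle."""

    def __init__(self, name, steps=3):
        self.name = name
        self._steps = steps
        self._progress = 0

    def poll(self):
        # Each poll advances the fake task; a real poll would read the task's
        # state/progress from vCenter instead.
        self._progress = min(100, self._progress + 100 // self._steps)
        return self._progress


def wait_for_task(task, interval=0.5):
    """Poll until the task reports 100% progress, then report success."""
    while True:
        progress = task.poll()
        print(f"Task {task.name} progress is {progress}%.")
        if progress >= 100:
            print(f"Task {task.name} completed successfully.")
            return
        time.sleep(interval)


if __name__ == "__main__":
    # The spawn flow recorded above runs these vCenter tasks in order.
    for name in ("CopyVirtualDisk_Task", "ExtendVirtualDisk_Task",
                 "ReconfigVM_Task", "Rename_Task", "PowerOnVM_Task"):
        wait_for_task(VMwareTask(name), interval=0.0)
```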
[ 608.891912] env[68217]: DEBUG nova.compute.manager [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 608.892825] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81ac4489-bc37-4e0d-9201-2f79e700f1b1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.918694] env[68217]: DEBUG nova.compute.manager [req-87ce6279-874e-4eb7-a1c2-6b9851844de9 req-50eb5c27-a183-4bbc-9f83-fc8889309880 service nova] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Received event network-vif-plugged-800cf755-2034-482c-a604-63fbfe457f26 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 608.918803] env[68217]: DEBUG oslo_concurrency.lockutils [req-87ce6279-874e-4eb7-a1c2-6b9851844de9 req-50eb5c27-a183-4bbc-9f83-fc8889309880 service nova] Acquiring lock "aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 608.919340] env[68217]: DEBUG oslo_concurrency.lockutils [req-87ce6279-874e-4eb7-a1c2-6b9851844de9 req-50eb5c27-a183-4bbc-9f83-fc8889309880 service nova] Lock "aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 608.920920] env[68217]: DEBUG oslo_concurrency.lockutils [req-87ce6279-874e-4eb7-a1c2-6b9851844de9 req-50eb5c27-a183-4bbc-9f83-fc8889309880 service nova] Lock "aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 608.920920] env[68217]: DEBUG nova.compute.manager [req-87ce6279-874e-4eb7-a1c2-6b9851844de9 req-50eb5c27-a183-4bbc-9f83-fc8889309880 service nova] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] No waiting events found dispatching network-vif-plugged-800cf755-2034-482c-a604-63fbfe457f26 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 608.920920] env[68217]: WARNING nova.compute.manager [req-87ce6279-874e-4eb7-a1c2-6b9851844de9 req-50eb5c27-a183-4bbc-9f83-fc8889309880 service nova] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Received unexpected event network-vif-plugged-800cf755-2034-482c-a604-63fbfe457f26 for instance with vm_state building and task_state spawning. [ 608.943837] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960664, 'name': PowerOnVM_Task} progress is 81%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.032320] env[68217]: DEBUG nova.network.neutron [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Updating instance_info_cache with network_info: [{"id": "800cf755-2034-482c-a604-63fbfe457f26", "address": "fa:16:3e:95:b6:ca", "network": {"id": "a43cf623-7286-46d5-a15e-6bf13296df5c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1012854120-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "734fb9662f984c239bc1648eb0033ff4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap800cf755-20", "ovs_interfaceid": "800cf755-2034-482c-a604-63fbfe457f26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.107748] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960665, 'name': Rename_Task, 'duration_secs': 0.258769} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.112941] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 609.117896] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-62c4082e-6670-4b3d-b157-d6df2ce9fd3c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.127882] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 609.127882] env[68217]: value = "task-2960667" [ 609.127882] env[68217]: _type = "Task" [ 609.127882] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.132107] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': task-2960666, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.230682] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8742c249-d96d-40f7-9171-6a0e27d09070 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.243582] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6b7061-9839-49be-be61-487f014025e4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.286021] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc0842d1-5916-46c3-9a45-105dc9ba57a5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.295401] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c9b010-a38b-4494-b036-0a7122d3f3dc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.311669] env[68217]: DEBUG nova.compute.provider_tree [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 609.335449] env[68217]: DEBUG oslo_concurrency.lockutils [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "58c15727-79ae-404f-a054-d71e3be498cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 609.335701] env[68217]: DEBUG oslo_concurrency.lockutils [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "58c15727-79ae-404f-a054-d71e3be498cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 609.385096] env[68217]: DEBUG nova.compute.manager [None req-b07cb04f-4b61-447a-9392-db5da4f592c3 tempest-ServerDiagnosticsV248Test-317965423 tempest-ServerDiagnosticsV248Test-317965423-project-admin] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 609.386478] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed6727c-9efa-4cf6-ad8a-ed9d9d68a5f2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.394869] env[68217]: INFO nova.compute.manager 
[None req-b07cb04f-4b61-447a-9392-db5da4f592c3 tempest-ServerDiagnosticsV248Test-317965423 tempest-ServerDiagnosticsV248Test-317965423-project-admin] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Retrieving diagnostics [ 609.395774] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb69ca45-8eb9-4ecb-9a0c-9cc876916a37 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.438577] env[68217]: INFO nova.compute.manager [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Took 30.56 seconds to build instance. [ 609.446175] env[68217]: DEBUG oslo_vmware.api [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960664, 'name': PowerOnVM_Task, 'duration_secs': 0.82411} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.447258] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 609.447258] env[68217]: INFO nova.compute.manager [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Took 6.03 seconds to spawn the instance on the hypervisor. 
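Note: the PowerOnVM_Task entries above show the task-polling pattern used throughout this log: the API session keeps logging a progress percentage until the task reports success along with its duration. The following is a minimal, self-contained sketch of such a polling loop, not the oslo.vmware implementation; fetch_task_info, TaskFailed, and the dict shape they exchange are hypothetical stand-ins.

    import time


    class TaskFailed(Exception):
        """Raised when the hypervisor reports a task error or a timeout."""


    def wait_for_task(fetch_task_info, task_id, interval=0.5, timeout=300):
        """Poll a task until it reaches a terminal state.

        fetch_task_info(task_id) is assumed to return a dict such as
        {'state': 'running', 'progress': 81} or
        {'state': 'success', 'duration_secs': 0.82}.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info(task_id)
            state = info.get('state')
            if state == 'success':
                return info                      # carries e.g. 'duration_secs'
            if state == 'error':
                raise TaskFailed(info.get('message', task_id))
            # Still queued or running: report progress and poll again,
            # mirroring the "progress is N%" lines in the log.
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
            time.sleep(interval)
        raise TaskFailed(f"timed out waiting for {task_id}")

In the log above the same loop surfaces as repeated "_poll_task ... progress is N%" debug lines followed by a single "completed successfully" line with 'duration_secs'.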
[ 609.447258] env[68217]: DEBUG nova.compute.manager [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 609.447958] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722e338a-e282-44ed-8f53-583c0eee6ca2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.535590] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Releasing lock "refresh_cache-aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 609.535590] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Instance network_info: |[{"id": "800cf755-2034-482c-a604-63fbfe457f26", "address": "fa:16:3e:95:b6:ca", "network": {"id": "a43cf623-7286-46d5-a15e-6bf13296df5c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1012854120-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "734fb9662f984c239bc1648eb0033ff4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap800cf755-20", "ovs_interfaceid": "800cf755-2034-482c-a604-63fbfe457f26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 609.535714] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:95:b6:ca', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '132fdc50-e144-4a9b-8d77-6378eec02d9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '800cf755-2034-482c-a604-63fbfe457f26', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 609.544866] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Creating folder: Project (734fb9662f984c239bc1648eb0033ff4). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 609.544866] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7a58e903-959f-4726-9834-ecffeab3af3f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.559608] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Created folder: Project (734fb9662f984c239bc1648eb0033ff4) in parent group-v594094. [ 609.559608] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Creating folder: Instances. Parent ref: group-v594135. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 609.559608] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fdcdf801-a46d-4ede-956a-2843fb38751f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.573966] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Created folder: Instances in parent group-v594135. [ 609.574292] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 609.574542] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 609.574773] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b9a9c3af-8c43-4714-b6c3-01ccd767ada2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.596310] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 609.596310] env[68217]: value = "task-2960670" [ 609.596310] env[68217]: _type = "Task" [ 609.596310] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.606311] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960670, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.617568] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': task-2960666, 'name': ReconfigVM_Task, 'duration_secs': 0.552561} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.617889] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Reconfigured VM instance instance-0000000e to attach disk [datastore1] af11d05f-4432-4505-bb52-226414488960/af11d05f-4432-4505-bb52-226414488960.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 609.618611] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-343ea054-8ce0-4b74-8c98-043da5e5fcd1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.630028] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Waiting for the task: (returnval){ [ 609.630028] env[68217]: value = "task-2960671" [ 609.630028] env[68217]: _type = "Task" [ 609.630028] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.648338] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960667, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.653443] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': task-2960671, 'name': Rename_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.721025] env[68217]: DEBUG oslo_concurrency.lockutils [None req-03eea59e-55dd-40a2-a10e-d6ac2f021a34 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "11f9c054-62b9-4ac9-9651-5c85e7a86663" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 609.721437] env[68217]: DEBUG oslo_concurrency.lockutils [None req-03eea59e-55dd-40a2-a10e-d6ac2f021a34 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "11f9c054-62b9-4ac9-9651-5c85e7a86663" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 609.721706] env[68217]: DEBUG nova.compute.manager [None req-03eea59e-55dd-40a2-a10e-d6ac2f021a34 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 609.722882] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ce7011-6f2b-4bdf-b4c2-619aa3938bca {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.731865] env[68217]: DEBUG nova.compute.manager [None req-03eea59e-55dd-40a2-a10e-d6ac2f021a34 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68217) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 609.732644] env[68217]: DEBUG nova.objects.instance [None req-03eea59e-55dd-40a2-a10e-d6ac2f021a34 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lazy-loading 'flavor' on Instance uuid 11f9c054-62b9-4ac9-9651-5c85e7a86663 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 609.843534] env[68217]: ERROR nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [req-d3e1be8b-b85f-4b02-a33f-fd9ce859f185] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-d3e1be8b-b85f-4b02-a33f-fd9ce859f185"}]} [ 609.863294] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 609.887180] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 609.887180] env[68217]: DEBUG nova.compute.provider_tree [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 609.906049] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 609.929746] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 609.941298] env[68217]: DEBUG oslo_concurrency.lockutils [None req-88602cc6-91c1-4d82-a240-e0cf8bb41851 tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Lock "bbd282ea-58aa-47b8-aa82-283a55ac1b29" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.530s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 609.970677] env[68217]: INFO nova.compute.manager [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Took 21.37 seconds to build instance. [ 610.115161] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960670, 'name': CreateVM_Task, 'duration_secs': 0.404287} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.115368] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 610.118842] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.118842] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 610.118842] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 610.118842] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f222b374-1e07-4588-91d5-adf5d225a5c3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.125230] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 610.125230] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]521df9ba-93f9-d3bb-c269-d73e9e833362" [ 610.125230] env[68217]: _type = "Task" [ 610.125230] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.143019] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521df9ba-93f9-d3bb-c269-d73e9e833362, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.149879] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': task-2960671, 'name': Rename_Task, 'duration_secs': 0.193097} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.150401] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 610.150655] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-da737f7d-2290-4ae5-b480-fbe763edd14b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.155574] env[68217]: DEBUG oslo_vmware.api [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960667, 'name': PowerOnVM_Task, 'duration_secs': 0.644418} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.156658] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 610.156658] env[68217]: DEBUG nova.compute.manager [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 610.157384] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01681935-6363-44b9-be9a-dc04b5339f21 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.168060] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Waiting for the task: (returnval){ [ 610.168060] env[68217]: value = "task-2960672" [ 610.168060] env[68217]: _type = "Task" [ 610.168060] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.183552] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': task-2960672, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.446755] env[68217]: DEBUG nova.compute.manager [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 610.458892] env[68217]: DEBUG oslo_concurrency.lockutils [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Acquiring lock "dc45d268-7a7f-4e65-b6fa-942ddba69b03" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.459681] env[68217]: DEBUG oslo_concurrency.lockutils [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Lock "dc45d268-7a7f-4e65-b6fa-942ddba69b03" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 610.475096] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b25145b9-a98d-4891-b7f2-47581761c339 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "93b49e91-5e9a-4b11-a833-31ab0883e0e8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.004s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 610.484460] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee45ac43-4f46-43a4-b42e-14e7a32a11d9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.502289] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69762570-cc94-4780-9b31-3b8e1680ab75 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.539902] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8b3b11-8320-4a6e-bd4c-ca2de56b849f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.550777] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc644e4f-e5a2-423b-a242-8fb7523fa06b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.566289] env[68217]: DEBUG nova.compute.provider_tree [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 610.645046] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521df9ba-93f9-d3bb-c269-d73e9e833362, 'name': SearchDatastore_Task, 'duration_secs': 0.021837} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 610.645375] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 610.645573] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 610.646544] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 610.646544] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 610.646544] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 610.646757] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-00e46644-ec1d-4aa9-af7e-e57f9e8bbc41 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.660125] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 610.660125] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Folder [datastore1] 
devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 610.660125] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-548080a1-953b-4c3c-afcd-aa5f868cd306 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.670150] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 610.670150] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b16434-b81e-1e22-eb6c-f7d2ddd8974b" [ 610.670150] env[68217]: _type = "Task" [ 610.670150] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.688421] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b16434-b81e-1e22-eb6c-f7d2ddd8974b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.688421] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': task-2960672, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.690363] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 610.744257] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-03eea59e-55dd-40a2-a10e-d6ac2f021a34 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 610.744558] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d6373dd6-9802-4d69-82ac-fb25c9e8c996 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.757849] env[68217]: DEBUG oslo_vmware.api [None req-03eea59e-55dd-40a2-a10e-d6ac2f021a34 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 610.757849] env[68217]: value = "task-2960673" [ 610.757849] env[68217]: _type = "Task" [ 610.757849] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.766152] env[68217]: DEBUG oslo_vmware.api [None req-03eea59e-55dd-40a2-a10e-d6ac2f021a34 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960673, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.977762] env[68217]: DEBUG nova.compute.manager [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 610.987980] env[68217]: DEBUG oslo_concurrency.lockutils [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 611.097448] env[68217]: ERROR nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [req-06e8e4a3-deff-4733-8e0c-ab59f4f98f86] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-06e8e4a3-deff-4733-8e0c-ab59f4f98f86"}]} [ 611.119804] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 611.140559] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 611.140791] env[68217]: DEBUG nova.compute.provider_tree [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 611.153827] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 611.189018] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b16434-b81e-1e22-eb6c-f7d2ddd8974b, 'name': SearchDatastore_Task, 'duration_secs': 0.018629} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.189911] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 611.193461] env[68217]: DEBUG oslo_vmware.api [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': task-2960672, 'name': PowerOnVM_Task, 'duration_secs': 0.808835} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.193932] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1663a375-944c-45f6-8d61-2af95e99e120 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.196692] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 611.199087] env[68217]: INFO nova.compute.manager [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Took 10.50 seconds to spawn the instance on the hypervisor. 
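Note: the 409 "placement.concurrent_update" errors above, each followed immediately by "Refreshing inventories for resource provider ...", reflect Placement's generation-based optimistic concurrency: every inventory PUT carries the provider generation the caller last read, and a conflict means another writer bumped it first, so the client re-reads the current inventory and retries. A rough sketch of that retry loop, assuming the requests library and a hypothetical endpoint URL, with authentication omitted:

    import requests

    PLACEMENT = "http://placement.example/placement"   # hypothetical endpoint
    HEADERS = {"OpenStack-API-Version": "placement 1.26"}  # auth token omitted


    def update_inventory(provider_uuid, inventory, retries=3):
        """PUT an inventory dict, retrying on generation conflicts (HTTP 409)."""
        url = f"{PLACEMENT}/resource_providers/{provider_uuid}/inventories"
        for _ in range(retries):
            # Re-read to pick up the current resource_provider_generation.
            current = requests.get(url, headers=HEADERS).json()
            body = {
                "resource_provider_generation": current["resource_provider_generation"],
                "inventories": inventory,
            }
            resp = requests.put(url, json=body, headers=HEADERS)
            if resp.status_code == 200:
                return resp.json()
            if resp.status_code == 409:
                # Another writer updated the provider; refresh and try again.
                continue
            resp.raise_for_status()
        raise RuntimeError(f"could not update inventory for {provider_uuid}")

This also explains why the DISK_GB max_unit seen in the log flips between 153 and 154 across attempts: each retry re-reads whatever inventory the other writer last published before applying its own view.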
[ 611.199087] env[68217]: DEBUG nova.compute.manager [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 611.199087] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3309b26-f0dc-4a54-866b-4309a14d3273 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.212843] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 611.212843] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52db86ce-4d39-2622-0528-cb571d8294a4" [ 611.212843] env[68217]: _type = "Task" [ 611.212843] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.223729] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52db86ce-4d39-2622-0528-cb571d8294a4, 'name': SearchDatastore_Task, 'duration_secs': 0.013899} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.226893] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 611.227262] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5/aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 611.227913] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-90443e78-a1d3-455f-8052-d6ab57897a9d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.238441] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 611.238441] env[68217]: value = "task-2960674" [ 611.238441] env[68217]: _type = "Task" [ 611.238441] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.249715] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960674, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.271653] env[68217]: DEBUG oslo_vmware.api [None req-03eea59e-55dd-40a2-a10e-d6ac2f021a34 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960673, 'name': PowerOffVM_Task, 'duration_secs': 0.332671} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.276066] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-03eea59e-55dd-40a2-a10e-d6ac2f021a34 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 611.276216] env[68217]: DEBUG nova.compute.manager [None req-03eea59e-55dd-40a2-a10e-d6ac2f021a34 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 611.279234] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b49cfe83-65d6-40c0-9dd4-735baadc514a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.514319] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 611.514671] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Acquiring lock "bbd282ea-58aa-47b8-aa82-283a55ac1b29" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 611.514909] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Lock "bbd282ea-58aa-47b8-aa82-283a55ac1b29" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 611.515236] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Acquiring lock "bbd282ea-58aa-47b8-aa82-283a55ac1b29-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 611.515280] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Lock "bbd282ea-58aa-47b8-aa82-283a55ac1b29-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 611.515421] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Lock "bbd282ea-58aa-47b8-aa82-283a55ac1b29-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 611.519403] env[68217]: INFO nova.compute.manager [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Terminating instance [ 611.589158] env[68217]: DEBUG nova.compute.manager [req-ea075ca5-8a35-481d-860e-f3290a48bb37 req-5067c211-7b40-4b10-b7fb-ae108d991ed2 service nova] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Received event network-changed-800cf755-2034-482c-a604-63fbfe457f26 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 611.589491] env[68217]: DEBUG nova.compute.manager [req-ea075ca5-8a35-481d-860e-f3290a48bb37 req-5067c211-7b40-4b10-b7fb-ae108d991ed2 service nova] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Refreshing instance network info cache due to event network-changed-800cf755-2034-482c-a604-63fbfe457f26. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 611.589678] env[68217]: DEBUG oslo_concurrency.lockutils [req-ea075ca5-8a35-481d-860e-f3290a48bb37 req-5067c211-7b40-4b10-b7fb-ae108d991ed2 service nova] Acquiring lock "refresh_cache-aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 611.589851] env[68217]: DEBUG oslo_concurrency.lockutils [req-ea075ca5-8a35-481d-860e-f3290a48bb37 req-5067c211-7b40-4b10-b7fb-ae108d991ed2 service nova] Acquired lock "refresh_cache-aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 611.590244] env[68217]: DEBUG nova.network.neutron [req-ea075ca5-8a35-481d-860e-f3290a48bb37 req-5067c211-7b40-4b10-b7fb-ae108d991ed2 service nova] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Refreshing network info cache for port 800cf755-2034-482c-a604-63fbfe457f26 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 611.724803] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bda426a-0653-4f52-a396-f3f5079fdab9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.730414] env[68217]: INFO nova.compute.manager [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Took 31.63 seconds to build instance. [ 611.750677] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29092e67-c2df-4d5b-8131-9b22e71df797 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.791148] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-211e4cf3-043f-46d1-9f1c-797a04417cae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.793901] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960674, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.797146] env[68217]: DEBUG oslo_concurrency.lockutils [None req-03eea59e-55dd-40a2-a10e-d6ac2f021a34 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "11f9c054-62b9-4ac9-9651-5c85e7a86663" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.076s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 611.808793] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf3699f-3d89-448c-8a43-570f9d9443da {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.825392] env[68217]: DEBUG nova.compute.provider_tree [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 611.865042] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Acquiring lock "0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 611.865042] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Lock "0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 611.865042] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Acquiring lock "0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 611.865042] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Lock "0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 611.866364] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 
tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Lock "0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 611.870201] env[68217]: INFO nova.compute.manager [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Terminating instance [ 612.029274] env[68217]: DEBUG nova.compute.manager [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 612.029274] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 612.029274] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9051fd9-5b20-4ed4-9394-abdae5a386a2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.040950] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 612.041253] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3d8d185d-aa56-45fd-af96-c75b26e4d31f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.056732] env[68217]: DEBUG oslo_vmware.api [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Waiting for the task: (returnval){ [ 612.056732] env[68217]: value = "task-2960675" [ 612.056732] env[68217]: _type = "Task" [ 612.056732] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.068326] env[68217]: DEBUG oslo_vmware.api [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960675, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.236361] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e65db1cf-609b-4ca2-88ba-cf41b22ee0f0 tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Lock "af11d05f-4432-4505-bb52-226414488960" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 42.984s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 612.259189] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960674, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.573528} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.260367] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5/aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 612.260661] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 612.261256] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5967971d-e34d-430e-a1d4-9471cbbd132c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.270351] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 612.270351] env[68217]: value = "task-2960676" [ 612.270351] env[68217]: _type = "Task" [ 612.270351] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.285139] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960676, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.368834] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 39 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 612.369241] env[68217]: DEBUG nova.compute.provider_tree [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 39 to 40 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 612.369521] env[68217]: DEBUG nova.compute.provider_tree [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 612.375305] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Acquiring lock "refresh_cache-0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.375484] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Acquired lock "refresh_cache-0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.378131] env[68217]: DEBUG nova.network.neutron [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 612.458517] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Acquiring lock "ae5fa3f4-e487-40ed-9ca4-12a6f9713eba" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.459408] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Lock "ae5fa3f4-e487-40ed-9ca4-12a6f9713eba" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.459682] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Acquiring lock "ae5fa3f4-e487-40ed-9ca4-12a6f9713eba-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 612.459859] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Lock "ae5fa3f4-e487-40ed-9ca4-12a6f9713eba-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.460105] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Lock "ae5fa3f4-e487-40ed-9ca4-12a6f9713eba-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 612.462758] env[68217]: INFO nova.compute.manager [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Terminating instance [ 612.477799] env[68217]: DEBUG nova.network.neutron [req-ea075ca5-8a35-481d-860e-f3290a48bb37 req-5067c211-7b40-4b10-b7fb-ae108d991ed2 service nova] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Updated VIF entry in instance network info cache for port 800cf755-2034-482c-a604-63fbfe457f26. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 612.478176] env[68217]: DEBUG nova.network.neutron [req-ea075ca5-8a35-481d-860e-f3290a48bb37 req-5067c211-7b40-4b10-b7fb-ae108d991ed2 service nova] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Updating instance_info_cache with network_info: [{"id": "800cf755-2034-482c-a604-63fbfe457f26", "address": "fa:16:3e:95:b6:ca", "network": {"id": "a43cf623-7286-46d5-a15e-6bf13296df5c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1012854120-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "734fb9662f984c239bc1648eb0033ff4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap800cf755-20", "ovs_interfaceid": "800cf755-2034-482c-a604-63fbfe457f26", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.569267] env[68217]: DEBUG oslo_vmware.api [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960675, 'name': PowerOffVM_Task, 'duration_secs': 0.402931} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.569761] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 612.569827] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 612.570947] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-86e370b5-1d63-4286-93c1-ab7dec7560bf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.656565] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 612.656822] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 612.657059] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Deleting the datastore file [datastore1] bbd282ea-58aa-47b8-aa82-283a55ac1b29 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 612.657351] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa6a56e1-9c1f-4e2a-a1c8-f99688835967 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.660095] env[68217]: INFO nova.compute.manager [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Rebuilding instance [ 612.669062] env[68217]: DEBUG oslo_vmware.api [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Waiting for the task: (returnval){ [ 612.669062] env[68217]: value = "task-2960678" [ 612.669062] env[68217]: _type = "Task" [ 612.669062] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.678226] env[68217]: DEBUG oslo_vmware.api [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960678, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.724906] env[68217]: DEBUG nova.compute.manager [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 612.727266] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9847bdb-36a4-411a-bd2e-c9a3f6f02f62 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.740677] env[68217]: DEBUG nova.compute.manager [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 612.785169] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960676, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084195} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.785169] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 612.785169] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83ab4508-587b-4ec2-beef-f53729f8122e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.818052] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Reconfiguring VM instance instance-00000010 to attach disk [datastore1] aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5/aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 612.818733] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6c22f7a-0031-4980-a7cd-6f8f27604507 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.846162] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 612.846162] env[68217]: value = "task-2960679" [ 612.846162] env[68217]: _type = "Task" [ 612.846162] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.858341] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960679, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.876724] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 6.651s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 612.879121] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 612.881796] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.950s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 612.884409] env[68217]: INFO nova.compute.claims [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 612.955164] env[68217]: DEBUG nova.network.neutron [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 612.967857] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Acquiring lock "refresh_cache-ae5fa3f4-e487-40ed-9ca4-12a6f9713eba" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.968151] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Acquired lock "refresh_cache-ae5fa3f4-e487-40ed-9ca4-12a6f9713eba" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 612.968372] env[68217]: DEBUG nova.network.neutron [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 612.981133] env[68217]: DEBUG oslo_concurrency.lockutils [req-ea075ca5-8a35-481d-860e-f3290a48bb37 req-5067c211-7b40-4b10-b7fb-ae108d991ed2 service nova] Releasing lock "refresh_cache-aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.109264] env[68217]: DEBUG nova.network.neutron [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.185883] env[68217]: DEBUG oslo_vmware.api [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960678, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.266751] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 613.363409] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960679, 'name': ReconfigVM_Task, 'duration_secs': 0.485765} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.363761] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Reconfigured VM instance instance-00000010 to attach disk [datastore1] aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5/aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 613.364955] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-60e5f012-42dc-4af6-ab07-269c1edd16b2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.375211] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 613.375211] env[68217]: value = "task-2960680" [ 613.375211] env[68217]: _type = "Task" [ 613.375211] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.386326] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960680, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.394702] env[68217]: DEBUG nova.compute.utils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 613.396300] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 613.399482] env[68217]: DEBUG nova.network.neutron [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 613.518176] env[68217]: DEBUG nova.network.neutron [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 613.615829] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Releasing lock "refresh_cache-0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 613.615829] env[68217]: DEBUG nova.compute.manager [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 613.616202] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 613.617100] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f05b84cb-7bdc-434d-97b7-de9170465ec8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.628330] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 613.628330] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4f05ca7-3f66-4205-a544-53079acf44ee {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.635664] env[68217]: DEBUG oslo_vmware.api [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Waiting for the task: (returnval){ [ 613.635664] env[68217]: value = "task-2960681" [ 613.635664] env[68217]: _type = "Task" [ 613.635664] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.645600] env[68217]: DEBUG oslo_vmware.api [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960681, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.649035] env[68217]: DEBUG nova.network.neutron [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.657573] env[68217]: DEBUG nova.policy [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '348bde8560e04eee89f39a3a842e173b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '734fb9662f984c239bc1648eb0033ff4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 613.680048] env[68217]: DEBUG oslo_vmware.api [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Task: {'id': task-2960678, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.521588} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.680486] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 613.680578] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 613.680676] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 613.680851] env[68217]: INFO nova.compute.manager [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Took 1.65 seconds to destroy the instance on the hypervisor. [ 613.681097] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 613.681304] env[68217]: DEBUG nova.compute.manager [-] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 613.681401] env[68217]: DEBUG nova.network.neutron [-] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 613.747331] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 613.747760] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-329db264-df65-4208-896c-d5edb2beb8c0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.757995] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 613.757995] env[68217]: value = "task-2960682" [ 613.757995] env[68217]: _type = "Task" [ 613.757995] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.775456] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960682, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.892436] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960680, 'name': Rename_Task, 'duration_secs': 0.208906} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.893323] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 613.893752] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f125eef4-be3c-44af-a859-60de611ee9c4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.903148] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 613.924135] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 613.924135] env[68217]: value = "task-2960683" [ 613.924135] env[68217]: _type = "Task" [ 613.924135] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.938811] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960683, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.097410] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquiring lock "b5e15801-301a-4ee6-87d2-bbf749967631" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 614.097576] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "b5e15801-301a-4ee6-87d2-bbf749967631" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 614.153169] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Releasing lock "refresh_cache-ae5fa3f4-e487-40ed-9ca4-12a6f9713eba" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 614.153755] env[68217]: DEBUG nova.compute.manager [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 614.154212] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 614.155718] env[68217]: DEBUG oslo_vmware.api [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960681, 'name': PowerOffVM_Task, 'duration_secs': 0.155724} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.155718] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c31892d-75fb-4d2b-a0d7-017960d4fe5e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.160027] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 614.160027] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 614.160027] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1b6e1a8-3f45-4771-a7d3-4970815e0dd9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.167788] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 614.170730] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-86218080-7579-4ee9-943c-f110a68e2cbf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.180284] env[68217]: DEBUG oslo_vmware.api [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 614.180284] env[68217]: value = "task-2960685" [ 614.180284] env[68217]: _type = "Task" [ 614.180284] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.190133] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 614.190372] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 614.190586] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Deleting the datastore file [datastore1] 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 614.195037] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cacd0b8a-37ff-4350-b077-ff8d4794faaa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.197204] env[68217]: DEBUG oslo_vmware.api [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960685, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.204169] env[68217]: DEBUG oslo_vmware.api [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Waiting for the task: (returnval){ [ 614.204169] env[68217]: value = "task-2960686" [ 614.204169] env[68217]: _type = "Task" [ 614.204169] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.218615] env[68217]: DEBUG oslo_vmware.api [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960686, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.272816] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960682, 'name': PowerOffVM_Task, 'duration_secs': 0.236079} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.272816] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 614.272920] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 614.274188] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-209df808-2ea4-4315-bdf7-b5a2edb50b00 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.283411] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 614.286425] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-57b8b04b-5faa-448e-bd07-f02771f46c25 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.363684] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 614.363684] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 614.363684] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Deleting the datastore file [datastore1] 93b49e91-5e9a-4b11-a833-31ab0883e0e8 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 614.364230] env[68217]: DEBUG nova.network.neutron [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Successfully created port: 746296a3-f700-4dff-823d-7b979c3ed19d {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 614.366325] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-146e9517-c811-4585-ab76-91a6e4d53801 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.379667] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 
tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 614.379667] env[68217]: value = "task-2960688" [ 614.379667] env[68217]: _type = "Task" [ 614.379667] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.389764] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960688, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.437992] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960683, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.491058] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caa75899-eff8-4e61-9f59-eabfe36c4c12 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.499747] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d690cf6-a02f-4bb7-9892-051504da6b9b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.542117] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8534e3c3-ca59-42b0-bbb7-4652f795e13c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.551211] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe967e8-ec22-4ce6-a806-2c49ee687fbb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.556099] env[68217]: DEBUG nova.compute.manager [req-158dd9bc-1bc3-461e-a659-ffd3d40e7d16 req-37fae87d-f13e-4c02-8444-beff871ede3c service nova] [instance: af11d05f-4432-4505-bb52-226414488960] Received event network-changed-199674eb-b628-4b78-a622-1e10863e5716 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 614.556345] env[68217]: DEBUG nova.compute.manager [req-158dd9bc-1bc3-461e-a659-ffd3d40e7d16 req-37fae87d-f13e-4c02-8444-beff871ede3c service nova] [instance: af11d05f-4432-4505-bb52-226414488960] Refreshing instance network info cache due to event network-changed-199674eb-b628-4b78-a622-1e10863e5716. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 614.556505] env[68217]: DEBUG oslo_concurrency.lockutils [req-158dd9bc-1bc3-461e-a659-ffd3d40e7d16 req-37fae87d-f13e-4c02-8444-beff871ede3c service nova] Acquiring lock "refresh_cache-af11d05f-4432-4505-bb52-226414488960" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 614.556989] env[68217]: DEBUG oslo_concurrency.lockutils [req-158dd9bc-1bc3-461e-a659-ffd3d40e7d16 req-37fae87d-f13e-4c02-8444-beff871ede3c service nova] Acquired lock "refresh_cache-af11d05f-4432-4505-bb52-226414488960" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 614.556989] env[68217]: DEBUG nova.network.neutron [req-158dd9bc-1bc3-461e-a659-ffd3d40e7d16 req-37fae87d-f13e-4c02-8444-beff871ede3c service nova] [instance: af11d05f-4432-4505-bb52-226414488960] Refreshing network info cache for port 199674eb-b628-4b78-a622-1e10863e5716 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 614.569428] env[68217]: DEBUG nova.compute.provider_tree [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 614.688863] env[68217]: DEBUG oslo_vmware.api [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960685, 'name': PowerOffVM_Task, 'duration_secs': 0.20089} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.689563] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 614.689813] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 614.690074] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c6efc24c-bda8-4072-8eae-3f54b4d312e1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.718138] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 614.718345] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 614.718529] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Deleting the datastore file [datastore1] ae5fa3f4-e487-40ed-9ca4-12a6f9713eba {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 614.718773] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-02fea9c2-0610-48bb-8b20-ea3c3ff05f13 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.725311] env[68217]: DEBUG oslo_vmware.api [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Task: {'id': task-2960686, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.13983} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.726012] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 614.726239] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 614.726864] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 614.726864] env[68217]: INFO nova.compute.manager [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Took 1.11 seconds to destroy the instance on the hypervisor. [ 614.726864] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 614.727545] env[68217]: DEBUG nova.compute.manager [-] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 614.727545] env[68217]: DEBUG nova.network.neutron [-] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 614.730789] env[68217]: DEBUG oslo_vmware.api [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for the task: (returnval){ [ 614.730789] env[68217]: value = "task-2960690" [ 614.730789] env[68217]: _type = "Task" [ 614.730789] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.738844] env[68217]: DEBUG oslo_vmware.api [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960690, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.757516] env[68217]: DEBUG nova.network.neutron [-] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 614.893299] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960688, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.154784} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.893299] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 614.893299] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 614.893586] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 614.895798] env[68217]: DEBUG nova.network.neutron [-] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.922581] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 614.925031] env[68217]: DEBUG nova.objects.instance [None req-89aea0d6-a266-4845-9570-d4002da3e111 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lazy-loading 'flavor' on Instance uuid 11f9c054-62b9-4ac9-9651-5c85e7a86663 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 614.940626] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960683, 'name': PowerOnVM_Task, 'duration_secs': 0.528584} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.940884] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 614.941130] env[68217]: INFO nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Took 8.86 seconds to spawn the instance on the hypervisor. [ 614.941270] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 614.942121] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-826bce0b-9fb7-435f-aa4c-c61fc7dd314b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.967756] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 614.967756] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 614.967756] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 614.967936] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 614.967936] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 
tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 614.968102] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 614.968321] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 614.968477] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 614.968637] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 614.968795] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 614.969176] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 614.970032] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a685149-b467-46f9-a995-b8dda28368df {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.978501] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0299bf3c-c6a2-4c1b-a6d2-1494cd6da960 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.103252] env[68217]: ERROR nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [req-744fc69d-360a-4938-a87b-7ce7b7b2ff17] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 
'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-744fc69d-360a-4938-a87b-7ce7b7b2ff17"}]} [ 615.124792] env[68217]: DEBUG nova.compute.manager [req-e8f6d2e4-b97b-4b55-8b0a-5760ba669895 req-82086d58-2f74-422e-8248-695b7e8190e0 service nova] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Received event network-vif-deleted-8b1504f0-1a6e-4b49-8c7d-52cddf1b91e1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 615.127845] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 615.157653] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 615.157987] env[68217]: DEBUG nova.compute.provider_tree [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 615.188338] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 615.218227] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:843}} [ 615.242089] env[68217]: DEBUG oslo_vmware.api [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Task: {'id': task-2960690, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.345817} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.245420] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 615.245616] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 615.246070] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 615.246233] env[68217]: INFO nova.compute.manager [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Took 1.09 seconds to destroy the instance on the hypervisor. [ 615.246481] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 615.247444] env[68217]: DEBUG nova.compute.manager [-] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 615.247444] env[68217]: DEBUG nova.network.neutron [-] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 615.262416] env[68217]: DEBUG nova.network.neutron [-] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.293158] env[68217]: DEBUG nova.network.neutron [-] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 615.405671] env[68217]: INFO nova.compute.manager [-] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Took 1.72 seconds to deallocate network for instance. 
[ 615.433738] env[68217]: DEBUG oslo_concurrency.lockutils [None req-89aea0d6-a266-4845-9570-d4002da3e111 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "refresh_cache-11f9c054-62b9-4ac9-9651-5c85e7a86663" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 615.434162] env[68217]: DEBUG oslo_concurrency.lockutils [None req-89aea0d6-a266-4845-9570-d4002da3e111 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquired lock "refresh_cache-11f9c054-62b9-4ac9-9651-5c85e7a86663" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 615.434424] env[68217]: DEBUG nova.network.neutron [None req-89aea0d6-a266-4845-9570-d4002da3e111 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 615.434588] env[68217]: DEBUG nova.objects.instance [None req-89aea0d6-a266-4845-9570-d4002da3e111 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lazy-loading 'info_cache' on Instance uuid 11f9c054-62b9-4ac9-9651-5c85e7a86663 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 615.463602] env[68217]: INFO nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Took 26.35 seconds to build instance. [ 615.609829] env[68217]: DEBUG nova.network.neutron [req-158dd9bc-1bc3-461e-a659-ffd3d40e7d16 req-37fae87d-f13e-4c02-8444-beff871ede3c service nova] [instance: af11d05f-4432-4505-bb52-226414488960] Updated VIF entry in instance network info cache for port 199674eb-b628-4b78-a622-1e10863e5716. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 615.610265] env[68217]: DEBUG nova.network.neutron [req-158dd9bc-1bc3-461e-a659-ffd3d40e7d16 req-37fae87d-f13e-4c02-8444-beff871ede3c service nova] [instance: af11d05f-4432-4505-bb52-226414488960] Updating instance_info_cache with network_info: [{"id": "199674eb-b628-4b78-a622-1e10863e5716", "address": "fa:16:3e:c9:de:e6", "network": {"id": "a4ff5577-8c9a-49e8-89d7-e86362ebdaa3", "bridge": "br-int", "label": "tempest-VolumesAssistedSnapshotsTest-93198499-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.254", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "aa6ecdf4d8874a98bd3db5fc64456622", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap199674eb-b6", "ovs_interfaceid": "199674eb-b628-4b78-a622-1e10863e5716", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.765849] env[68217]: INFO nova.compute.manager [-] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Took 1.04 seconds to deallocate network for instance. 
[ 615.793769] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45acb85b-1ee7-40af-9e10-a41bd2782b99 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.798855] env[68217]: DEBUG nova.network.neutron [-] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.809052] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c74baf0f-8061-4f28-b3a5-76cde81f9b30 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.842440] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8653a8cc-dd08-41e1-a30e-7ca23a0e5fd2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.850877] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f99d932-65f2-4aa5-8707-8c1b644f2cff {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.866079] env[68217]: DEBUG nova.compute.provider_tree [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 615.913755] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 615.941116] env[68217]: DEBUG nova.virt.hardware [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:567}} [ 615.941116] env[68217]: DEBUG nova.virt.hardware [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 615.941116] env[68217]: DEBUG nova.virt.hardware [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 615.941116] env[68217]: DEBUG nova.virt.hardware [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 615.941388] env[68217]: DEBUG nova.virt.hardware [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 615.941388] env[68217]: DEBUG nova.virt.hardware [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 615.941725] env[68217]: DEBUG nova.virt.hardware [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 615.941725] env[68217]: DEBUG nova.virt.hardware [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 615.943032] env[68217]: DEBUG nova.virt.hardware [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 615.943032] env[68217]: DEBUG nova.virt.hardware [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 615.943032] env[68217]: DEBUG nova.virt.hardware [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 615.944943] env[68217]: DEBUG nova.objects.base [None req-89aea0d6-a266-4845-9570-d4002da3e111 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Object 
Instance<11f9c054-62b9-4ac9-9651-5c85e7a86663> lazy-loaded attributes: flavor,info_cache {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 615.948908] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71fb7ae4-8c2a-491c-8f7c-8c740585f211 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.961580] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98305953-d265-4b1b-ae37-6419c86c8c22 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.967655] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 43.699s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 615.978263] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Instance VIF info [] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 615.983921] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 615.984787] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 615.985028] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-093a9473-382d-4513-bb31-37f26c50e749 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.002157] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 616.002157] env[68217]: value = "task-2960691" [ 616.002157] env[68217]: _type = "Task" [ 616.002157] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.010209] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960691, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.113565] env[68217]: DEBUG oslo_concurrency.lockutils [req-158dd9bc-1bc3-461e-a659-ffd3d40e7d16 req-37fae87d-f13e-4c02-8444-beff871ede3c service nova] Releasing lock "refresh_cache-af11d05f-4432-4505-bb52-226414488960" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 616.280725] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.302707] env[68217]: INFO nova.compute.manager [-] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Took 1.06 seconds to deallocate network for instance. [ 616.406489] env[68217]: DEBUG nova.scheduler.client.report [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 41 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 616.406793] env[68217]: DEBUG nova.compute.provider_tree [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 41 to 42 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 616.407012] env[68217]: DEBUG nova.compute.provider_tree [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 616.471229] env[68217]: DEBUG nova.compute.manager [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 616.488356] env[68217]: DEBUG nova.network.neutron [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Successfully updated port: 746296a3-f700-4dff-823d-7b979c3ed19d {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 616.519078] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960691, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.813435] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.914951] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.030s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 616.914951] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 616.921128] env[68217]: DEBUG oslo_concurrency.lockutils [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.974s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.924269] env[68217]: INFO nova.compute.claims [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 616.943995] env[68217]: DEBUG nova.compute.manager [req-15b1212b-1087-48fe-b3b6-3fe16046038e req-ace7a5c8-296f-4067-af27-8d3e1a588043 service nova] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Received event network-vif-plugged-746296a3-f700-4dff-823d-7b979c3ed19d {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 616.944355] env[68217]: DEBUG oslo_concurrency.lockutils [req-15b1212b-1087-48fe-b3b6-3fe16046038e req-ace7a5c8-296f-4067-af27-8d3e1a588043 service nova] Acquiring lock "480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 616.945377] env[68217]: DEBUG oslo_concurrency.lockutils [req-15b1212b-1087-48fe-b3b6-3fe16046038e req-ace7a5c8-296f-4067-af27-8d3e1a588043 service nova] Lock "480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 616.945576] env[68217]: DEBUG oslo_concurrency.lockutils [req-15b1212b-1087-48fe-b3b6-3fe16046038e req-ace7a5c8-296f-4067-af27-8d3e1a588043 service nova] Lock "480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 616.945974] env[68217]: DEBUG nova.compute.manager [req-15b1212b-1087-48fe-b3b6-3fe16046038e req-ace7a5c8-296f-4067-af27-8d3e1a588043 service nova] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] No waiting events found dispatching network-vif-plugged-746296a3-f700-4dff-823d-7b979c3ed19d {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 616.946209] env[68217]: WARNING nova.compute.manager [req-15b1212b-1087-48fe-b3b6-3fe16046038e req-ace7a5c8-296f-4067-af27-8d3e1a588043 service nova] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Received unexpected event network-vif-plugged-746296a3-f700-4dff-823d-7b979c3ed19d for instance with vm_state building and task_state spawning. 
[ 616.948105] env[68217]: DEBUG nova.compute.manager [req-15b1212b-1087-48fe-b3b6-3fe16046038e req-ace7a5c8-296f-4067-af27-8d3e1a588043 service nova] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Received event network-changed-746296a3-f700-4dff-823d-7b979c3ed19d {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 616.948105] env[68217]: DEBUG nova.compute.manager [req-15b1212b-1087-48fe-b3b6-3fe16046038e req-ace7a5c8-296f-4067-af27-8d3e1a588043 service nova] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Refreshing instance network info cache due to event network-changed-746296a3-f700-4dff-823d-7b979c3ed19d. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 616.948105] env[68217]: DEBUG oslo_concurrency.lockutils [req-15b1212b-1087-48fe-b3b6-3fe16046038e req-ace7a5c8-296f-4067-af27-8d3e1a588043 service nova] Acquiring lock "refresh_cache-480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 616.948105] env[68217]: DEBUG oslo_concurrency.lockutils [req-15b1212b-1087-48fe-b3b6-3fe16046038e req-ace7a5c8-296f-4067-af27-8d3e1a588043 service nova] Acquired lock "refresh_cache-480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 616.948105] env[68217]: DEBUG nova.network.neutron [req-15b1212b-1087-48fe-b3b6-3fe16046038e req-ace7a5c8-296f-4067-af27-8d3e1a588043 service nova] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Refreshing network info cache for port 746296a3-f700-4dff-823d-7b979c3ed19d {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 616.990083] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "refresh_cache-480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.016296] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 617.024632] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960691, 'name': CreateVM_Task, 'duration_secs': 0.579935} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.025337] env[68217]: DEBUG nova.network.neutron [None req-89aea0d6-a266-4845-9570-d4002da3e111 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Updating instance_info_cache with network_info: [{"id": "d2eec7fc-f623-4a8e-aee2-762e1eb58cf7", "address": "fa:16:3e:90:07:ac", "network": {"id": "3f8eb120-19fa-420f-a14b-3cf960a6fe58", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1021764887-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cff35c33460c4a50ae6bee636d950504", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e885ebd4-93ca-4e9e-8889-0f16bd91e61e", "external-id": "nsx-vlan-transportzone-580", "segmentation_id": 580, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd2eec7fc-f6", "ovs_interfaceid": "d2eec7fc-f623-4a8e-aee2-762e1eb58cf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.029199] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 617.029199] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.029199] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.029199] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 617.029199] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-378b0f75-b4a6-4d23-a44e-663b9b3c0d39 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.034869] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 
tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 617.034869] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52060884-1ac1-dfa2-9825-f766cd6c333b" [ 617.034869] env[68217]: _type = "Task" [ 617.034869] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.045994] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52060884-1ac1-dfa2-9825-f766cd6c333b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.431613] env[68217]: DEBUG nova.compute.utils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 617.436779] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 617.436779] env[68217]: DEBUG nova.network.neutron [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 617.523642] env[68217]: DEBUG nova.network.neutron [req-15b1212b-1087-48fe-b3b6-3fe16046038e req-ace7a5c8-296f-4067-af27-8d3e1a588043 service nova] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 617.532020] env[68217]: DEBUG oslo_concurrency.lockutils [None req-89aea0d6-a266-4845-9570-d4002da3e111 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Releasing lock "refresh_cache-11f9c054-62b9-4ac9-9651-5c85e7a86663" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.550827] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52060884-1ac1-dfa2-9825-f766cd6c333b, 'name': SearchDatastore_Task, 'duration_secs': 0.012368} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.553012] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 617.553012] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 617.553650] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.554635] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 617.554635] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 617.555135] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3eddf3ad-0507-4014-998b-eec65adc3fd4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.569075] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 617.569075] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 617.569075] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2af092b-9b1e-435f-82e4-99341b8cae35 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.575504] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 617.575504] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52127e32-81bf-c349-229c-35649cc13528" [ 617.575504] env[68217]: _type = "Task" [ 617.575504] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.584886] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52127e32-81bf-c349-229c-35649cc13528, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.606505] env[68217]: DEBUG nova.policy [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '348bde8560e04eee89f39a3a842e173b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '734fb9662f984c239bc1648eb0033ff4', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 617.744079] env[68217]: DEBUG nova.network.neutron [req-15b1212b-1087-48fe-b3b6-3fe16046038e req-ace7a5c8-296f-4067-af27-8d3e1a588043 service nova] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 617.937742] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 618.088016] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52127e32-81bf-c349-229c-35649cc13528, 'name': SearchDatastore_Task, 'duration_secs': 0.012042} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.091516] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26a387f8-588f-4ac9-b554-a0a22f4d9e6e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.097423] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 618.097423] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b6275f-0669-d10a-eaa3-3c10f0ee0172" [ 618.097423] env[68217]: _type = "Task" [ 618.097423] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.105484] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b6275f-0669-d10a-eaa3-3c10f0ee0172, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.249151] env[68217]: DEBUG oslo_concurrency.lockutils [req-15b1212b-1087-48fe-b3b6-3fe16046038e req-ace7a5c8-296f-4067-af27-8d3e1a588043 service nova] Releasing lock "refresh_cache-480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.252170] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquired lock "refresh_cache-480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 618.252280] env[68217]: DEBUG nova.network.neutron [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 618.530045] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bdc8fb4-a421-43b7-ad1f-d3a617527052 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.541959] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-89aea0d6-a266-4845-9570-d4002da3e111 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 618.543526] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edf00cb9-f9fd-4b78-a91e-f03a16fc2318 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.548808] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ad0d0866-bfb6-441b-8a4c-baefd4ab643c {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.589444] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbb51217-eed8-4608-9a45-46c294fc0761 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.593625] env[68217]: DEBUG oslo_vmware.api [None req-89aea0d6-a266-4845-9570-d4002da3e111 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 618.593625] env[68217]: value = "task-2960692" [ 618.593625] env[68217]: _type = "Task" [ 618.593625] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.605019] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cad5143-1698-4027-b623-4fc8c89f826d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.613605] env[68217]: DEBUG oslo_vmware.api [None req-89aea0d6-a266-4845-9570-d4002da3e111 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960692, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.629040] env[68217]: DEBUG nova.compute.provider_tree [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.633726] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b6275f-0669-d10a-eaa3-3c10f0ee0172, 'name': SearchDatastore_Task, 'duration_secs': 0.021465} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.633783] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 618.634732] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 93b49e91-5e9a-4b11-a833-31ab0883e0e8/93b49e91-5e9a-4b11-a833-31ab0883e0e8.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 618.634732] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a6215c5-be60-48b8-88d8-c0ec09e31a46 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.643434] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 618.643434] env[68217]: value = "task-2960693" [ 618.643434] env[68217]: _type = "Task" [ 618.643434] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.652367] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960693, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.845155] env[68217]: DEBUG nova.network.neutron [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Successfully created port: c4d2ead2-9ca2-4172-bf51-fd9673282e6b {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 618.880899] env[68217]: DEBUG nova.network.neutron [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 618.957361] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 619.001886] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 619.002102] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 619.003881] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 619.005679] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 619.006182] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 619.006182] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 619.006182] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 619.006357] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 619.007935] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 619.007935] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 619.007935] env[68217]: DEBUG nova.virt.hardware [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 619.007935] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d889b910-706e-4528-bad8-e13c3fd75c14 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.024149] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c986d12-c295-42d7-9e47-c97651fe191a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.113601] env[68217]: DEBUG oslo_vmware.api [None req-89aea0d6-a266-4845-9570-d4002da3e111 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960692, 'name': PowerOnVM_Task, 'duration_secs': 0.390386} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.113601] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-89aea0d6-a266-4845-9570-d4002da3e111 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 619.113601] env[68217]: DEBUG nova.compute.manager [None req-89aea0d6-a266-4845-9570-d4002da3e111 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 619.113601] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ef4bdbe-1f3c-4167-82dc-84a63292ca59 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.136075] env[68217]: DEBUG nova.scheduler.client.report [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 619.157043] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960693, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.405163] env[68217]: DEBUG nova.network.neutron [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Updating instance_info_cache with network_info: [{"id": "746296a3-f700-4dff-823d-7b979c3ed19d", "address": "fa:16:3e:5e:95:01", "network": {"id": "a43cf623-7286-46d5-a15e-6bf13296df5c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1012854120-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "734fb9662f984c239bc1648eb0033ff4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap746296a3-f7", "ovs_interfaceid": "746296a3-f700-4dff-823d-7b979c3ed19d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.646255] env[68217]: DEBUG oslo_concurrency.lockutils [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.723s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 619.646255] env[68217]: DEBUG nova.compute.manager [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 619.648456] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.077s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.651372] env[68217]: INFO nova.compute.claims [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 619.677100] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960693, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.658609} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.677100] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 93b49e91-5e9a-4b11-a833-31ab0883e0e8/93b49e91-5e9a-4b11-a833-31ab0883e0e8.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 619.677100] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 619.677100] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1c3148e2-338e-4ee6-8593-90992aea3ed1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.686040] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 619.686040] env[68217]: value = "task-2960694" [ 619.686040] env[68217]: _type = "Task" [ 619.686040] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.697990] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960694, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.839886] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Acquiring lock "e089c20e-b788-4e6c-9bd2-9ad485305582" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 619.840117] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Lock "e089c20e-b788-4e6c-9bd2-9ad485305582" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 619.908475] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Releasing lock "refresh_cache-480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 619.908837] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Instance network_info: |[{"id": "746296a3-f700-4dff-823d-7b979c3ed19d", "address": "fa:16:3e:5e:95:01", "network": {"id": "a43cf623-7286-46d5-a15e-6bf13296df5c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1012854120-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "734fb9662f984c239bc1648eb0033ff4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap746296a3-f7", "ovs_interfaceid": "746296a3-f700-4dff-823d-7b979c3ed19d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 619.909655] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5e:95:01', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '132fdc50-e144-4a9b-8d77-6378eec02d9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'746296a3-f700-4dff-823d-7b979c3ed19d', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 619.918855] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 619.919545] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 619.919545] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c671a76c-7319-4b4d-9b6a-a50b84974e4d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.944244] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 619.944244] env[68217]: value = "task-2960695" [ 619.944244] env[68217]: _type = "Task" [ 619.944244] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.955578] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960695, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.161931] env[68217]: DEBUG nova.compute.utils [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 620.163644] env[68217]: DEBUG nova.compute.manager [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 620.163810] env[68217]: DEBUG nova.network.neutron [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 620.196314] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960694, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.078291} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.196662] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 620.197561] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8425d461-e710-4495-b694-9ba932332fde {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.218667] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Reconfiguring VM instance instance-0000000f to attach disk [datastore1] 93b49e91-5e9a-4b11-a833-31ab0883e0e8/93b49e91-5e9a-4b11-a833-31ab0883e0e8.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 620.219437] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da6b2ac9-5f20-4c34-b44a-e61ba673531c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.242198] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 620.242198] env[68217]: value = "task-2960696" [ 620.242198] env[68217]: _type = "Task" [ 620.242198] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.246501] env[68217]: DEBUG nova.policy [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a9ce00cf16c64f8fb1f6d09dbd0c980b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '32d81faa5cbf43a888871c19126ea6e0', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 620.253732] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960696, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.459240] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960695, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.672977] env[68217]: DEBUG nova.compute.manager [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 620.763378] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960696, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.959494] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960695, 'name': CreateVM_Task, 'duration_secs': 0.702044} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.959709] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 620.960816] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.960816] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 620.960910] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 620.961143] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7c759f39-3b3a-4b29-a957-548692a0eace {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.968468] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 620.968468] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c5478f-bfb8-f7ee-f7bc-f10dbc195954" [ 620.968468] env[68217]: _type = "Task" [ 620.968468] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.978074] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c5478f-bfb8-f7ee-f7bc-f10dbc195954, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.184260] env[68217]: DEBUG nova.network.neutron [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Successfully created port: b9eba7e7-2ecb-4c93-bf49-1c2195e23121 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 621.232946] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-709fe701-4d93-4dbf-8c24-5abb8463fc22 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.247277] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b64517d-a01b-47b9-8339-a16834d108e6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.289447] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960696, 'name': ReconfigVM_Task, 'duration_secs': 0.561406} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.289447] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a00c8f92-d41e-4bd7-8adf-7df3fbfcdc66 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.292049] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Reconfigured VM instance instance-0000000f to attach disk [datastore1] 93b49e91-5e9a-4b11-a833-31ab0883e0e8/93b49e91-5e9a-4b11-a833-31ab0883e0e8.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 621.292752] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ccfbe5fb-55f5-4177-9ea2-0dd6129df3bf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.303296] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b19d71c-c38d-447f-babd-1b13f9b843cf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.307286] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 621.307286] env[68217]: value = "task-2960697" [ 621.307286] env[68217]: _type = "Task" [ 621.307286] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.319909] env[68217]: DEBUG nova.compute.provider_tree [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 621.327535] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960697, 'name': Rename_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.415512] env[68217]: DEBUG oslo_concurrency.lockutils [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "da1524a7-2756-4429-ada2-b1f493544bd2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 621.415764] env[68217]: DEBUG oslo_concurrency.lockutils [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "da1524a7-2756-4429-ada2-b1f493544bd2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 621.483101] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c5478f-bfb8-f7ee-f7bc-f10dbc195954, 'name': SearchDatastore_Task, 'duration_secs': 0.011217} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.483439] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 621.483979] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 621.484331] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.484590] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 621.484824] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 621.485195] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-64a3a85b-c524-4c8b-86c3-8fdfafb146c7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.497268] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 621.497494] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 621.498246] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c7f65e9e-ad32-4f94-8d28-6e38f1f88ad2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.507599] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 621.507599] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b95495-ae6d-d161-37e7-0ffebc82ec88" [ 621.507599] env[68217]: _type = "Task" [ 621.507599] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.514254] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b95495-ae6d-d161-37e7-0ffebc82ec88, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.688408] env[68217]: DEBUG nova.compute.manager [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 621.722622] env[68217]: DEBUG nova.virt.hardware [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 621.722768] env[68217]: DEBUG nova.virt.hardware [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 621.722819] env[68217]: DEBUG nova.virt.hardware [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 621.723017] env[68217]: DEBUG nova.virt.hardware [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 
tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 621.723307] env[68217]: DEBUG nova.virt.hardware [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 621.723784] env[68217]: DEBUG nova.virt.hardware [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 621.724030] env[68217]: DEBUG nova.virt.hardware [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 621.724204] env[68217]: DEBUG nova.virt.hardware [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 621.724379] env[68217]: DEBUG nova.virt.hardware [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 621.725026] env[68217]: DEBUG nova.virt.hardware [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 621.725279] env[68217]: DEBUG nova.virt.hardware [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 621.727025] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7105fd48-f3be-4422-bfaf-b789d02f93f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.735998] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50441bf0-f769-4273-8ce4-90ce1bf04025 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.743563] env[68217]: DEBUG nova.network.neutron [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Successfully updated port: c4d2ead2-9ca2-4172-bf51-fd9673282e6b {{(pid=68217) _update_port 
/opt/stack/nova/nova/network/neutron.py:586}} [ 621.821529] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960697, 'name': Rename_Task, 'duration_secs': 0.233049} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.821826] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 621.822732] env[68217]: DEBUG nova.scheduler.client.report [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 621.828902] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0134ecf9-c700-48f3-8193-07a656decf8b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.834765] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 621.834765] env[68217]: value = "task-2960698" [ 621.834765] env[68217]: _type = "Task" [ 621.834765] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.844905] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960698, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.020203] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b95495-ae6d-d161-37e7-0ffebc82ec88, 'name': SearchDatastore_Task, 'duration_secs': 0.010441} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.021102] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e084ea36-0d4b-4817-b3dc-b8e8c7ef80c7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.026875] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 622.026875] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]521351a3-79bd-6f92-b79d-3b4eb65c243e" [ 622.026875] env[68217]: _type = "Task" [ 622.026875] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.038526] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521351a3-79bd-6f92-b79d-3b4eb65c243e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.248168] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "refresh_cache-63e0fc9e-5182-4781-b007-69e2134718df" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.248448] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquired lock "refresh_cache-63e0fc9e-5182-4781-b007-69e2134718df" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 622.249245] env[68217]: DEBUG nova.network.neutron [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 622.291909] env[68217]: DEBUG nova.compute.manager [req-2e789290-547e-456c-a1fc-9f052dde53fc req-0ef95f7c-46d4-4fb8-bea9-73721ea0deba service nova] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Received event network-vif-plugged-c4d2ead2-9ca2-4172-bf51-fd9673282e6b {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 622.292565] env[68217]: DEBUG oslo_concurrency.lockutils [req-2e789290-547e-456c-a1fc-9f052dde53fc req-0ef95f7c-46d4-4fb8-bea9-73721ea0deba service nova] Acquiring lock "63e0fc9e-5182-4781-b007-69e2134718df-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 622.294247] env[68217]: DEBUG oslo_concurrency.lockutils [req-2e789290-547e-456c-a1fc-9f052dde53fc req-0ef95f7c-46d4-4fb8-bea9-73721ea0deba service nova] Lock "63e0fc9e-5182-4781-b007-69e2134718df-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" 
:: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.294705] env[68217]: DEBUG oslo_concurrency.lockutils [req-2e789290-547e-456c-a1fc-9f052dde53fc req-0ef95f7c-46d4-4fb8-bea9-73721ea0deba service nova] Lock "63e0fc9e-5182-4781-b007-69e2134718df-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 622.294899] env[68217]: DEBUG nova.compute.manager [req-2e789290-547e-456c-a1fc-9f052dde53fc req-0ef95f7c-46d4-4fb8-bea9-73721ea0deba service nova] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] No waiting events found dispatching network-vif-plugged-c4d2ead2-9ca2-4172-bf51-fd9673282e6b {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 622.295078] env[68217]: WARNING nova.compute.manager [req-2e789290-547e-456c-a1fc-9f052dde53fc req-0ef95f7c-46d4-4fb8-bea9-73721ea0deba service nova] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Received unexpected event network-vif-plugged-c4d2ead2-9ca2-4172-bf51-fd9673282e6b for instance with vm_state building and task_state spawning. [ 622.329484] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.681s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 622.330066] env[68217]: DEBUG nova.compute.manager [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 622.333592] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.724s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 622.333863] env[68217]: DEBUG nova.objects.instance [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Lazy-loading 'resources' on Instance uuid 7056fb29-2a2f-4275-a411-4d5f3fcb421f {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 622.344571] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960698, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.543629] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521351a3-79bd-6f92-b79d-3b4eb65c243e, 'name': SearchDatastore_Task, 'duration_secs': 0.010432} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.543629] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 622.543629] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9/480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 622.543629] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f5436a94-461d-490d-b220-3cd85b161661 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.552685] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 622.552685] env[68217]: value = "task-2960699" [ 622.552685] env[68217]: _type = "Task" [ 622.552685] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.563334] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960699, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.836437] env[68217]: DEBUG nova.compute.utils [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 622.844571] env[68217]: DEBUG nova.compute.manager [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 622.845212] env[68217]: DEBUG nova.network.neutron [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 622.862503] env[68217]: DEBUG oslo_vmware.api [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960698, 'name': PowerOnVM_Task, 'duration_secs': 0.725981} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.864903] env[68217]: DEBUG nova.network.neutron [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 622.867043] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 622.867279] env[68217]: DEBUG nova.compute.manager [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 622.869155] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fa0860d-9c3b-405f-bcab-ef884c5ffc65 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.067165] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960699, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.103582] env[68217]: DEBUG nova.policy [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b23052f548884b09bc58f4e0e6783591', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '36c9130982204910a139a45ddad542c3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 623.349573] env[68217]: DEBUG nova.compute.manager [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 623.400301] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 623.475094] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba67c87d-523c-4775-83a1-6e6163fe988c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.487697] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35cb1426-d10a-47b2-90e6-63e95775f3e2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.524840] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28cbda0b-162f-43f9-b3b1-7081fc113ed8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.537575] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df7762cf-b914-4b2e-9c95-df1f11b4c31a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.557551] env[68217]: DEBUG nova.compute.provider_tree [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 623.568156] env[68217]: DEBUG 
oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960699, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.595854} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.568555] env[68217]: DEBUG nova.network.neutron [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Successfully updated port: b9eba7e7-2ecb-4c93-bf49-1c2195e23121 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 623.570231] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9/480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 623.570231] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 623.570626] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e1440ad4-d647-40d4-a661-76f51fa5512e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.578176] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 623.578176] env[68217]: value = "task-2960700" [ 623.578176] env[68217]: _type = "Task" [ 623.578176] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.588806] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960700, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.866380] env[68217]: DEBUG nova.network.neutron [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Updating instance_info_cache with network_info: [{"id": "c4d2ead2-9ca2-4172-bf51-fd9673282e6b", "address": "fa:16:3e:27:8f:71", "network": {"id": "a43cf623-7286-46d5-a15e-6bf13296df5c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1012854120-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "734fb9662f984c239bc1648eb0033ff4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4d2ead2-9c", "ovs_interfaceid": "c4d2ead2-9ca2-4172-bf51-fd9673282e6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.074185] env[68217]: DEBUG oslo_concurrency.lockutils [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Acquiring lock "refresh_cache-678acc61-1c94-4152-b4e8-7569ab169ab9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 624.074326] env[68217]: DEBUG oslo_concurrency.lockutils [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Acquired lock "refresh_cache-678acc61-1c94-4152-b4e8-7569ab169ab9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 624.074966] env[68217]: DEBUG nova.network.neutron [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 624.089856] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960700, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.322305} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.090840] env[68217]: ERROR nova.scheduler.client.report [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [req-5b0e6f4d-9b60-4f2b-8988-674bac8ec4b0] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5b0e6f4d-9b60-4f2b-8988-674bac8ec4b0"}]} [ 624.092120] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 624.094833] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1680ce-4dbf-4dce-8fa6-50a2e06a97cc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.132066] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9/480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 624.132066] env[68217]: DEBUG nova.scheduler.client.report [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 624.132066] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33bd4958-5fb1-4cae-966f-40ff3709b692 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.155651] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 624.155651] env[68217]: value = "task-2960701" [ 624.155651] env[68217]: _type = "Task" [ 624.155651] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.168058] env[68217]: DEBUG nova.scheduler.client.report [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 624.168286] env[68217]: DEBUG nova.compute.provider_tree [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 624.176701] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960701, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.187088] env[68217]: DEBUG nova.scheduler.client.report [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 624.208628] env[68217]: DEBUG nova.scheduler.client.report [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 624.367488] env[68217]: DEBUG nova.compute.manager [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 624.373216] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Releasing lock "refresh_cache-63e0fc9e-5182-4781-b007-69e2134718df" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 624.373216] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Instance network_info: |[{"id": "c4d2ead2-9ca2-4172-bf51-fd9673282e6b", "address": "fa:16:3e:27:8f:71", "network": {"id": "a43cf623-7286-46d5-a15e-6bf13296df5c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1012854120-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "734fb9662f984c239bc1648eb0033ff4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4d2ead2-9c", "ovs_interfaceid": "c4d2ead2-9ca2-4172-bf51-fd9673282e6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 624.373605] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:27:8f:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '132fdc50-e144-4a9b-8d77-6378eec02d9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c4d2ead2-9ca2-4172-bf51-fd9673282e6b', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 624.386772] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 624.391202] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 624.392846] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7d14833d-dda6-40bb-8254-0420e1e53aa1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.411204] env[68217]: DEBUG nova.virt.hardware [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:15:08Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='447767842',id=23,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-1140066754',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 624.411480] env[68217]: DEBUG nova.virt.hardware [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 624.411685] env[68217]: DEBUG nova.virt.hardware [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 624.411744] env[68217]: DEBUG nova.virt.hardware [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 624.411880] env[68217]: DEBUG nova.virt.hardware [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 624.412043] env[68217]: DEBUG nova.virt.hardware [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 624.412421] env[68217]: DEBUG nova.virt.hardware [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da 
tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 624.412421] env[68217]: DEBUG nova.virt.hardware [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 624.412601] env[68217]: DEBUG nova.virt.hardware [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 624.415949] env[68217]: DEBUG nova.virt.hardware [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 624.416225] env[68217]: DEBUG nova.virt.hardware [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 624.417772] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e74a037-e6ee-4d6b-a15a-17500be3e6e9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.427600] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad7091c6-5d51-47ed-a1a4-aa6d2bf0b4f0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.431848] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 624.431848] env[68217]: value = "task-2960702" [ 624.431848] env[68217]: _type = "Task" [ 624.431848] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.451236] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960702, 'name': CreateVM_Task} progress is 10%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.460889] env[68217]: DEBUG nova.compute.manager [req-32f357c7-a4f7-4dd4-b9d3-0bff82e08d0c req-09fb2ee9-2adb-4266-bf0a-33c981c78fcd service nova] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Received event network-vif-plugged-b9eba7e7-2ecb-4c93-bf49-1c2195e23121 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 624.461127] env[68217]: DEBUG oslo_concurrency.lockutils [req-32f357c7-a4f7-4dd4-b9d3-0bff82e08d0c req-09fb2ee9-2adb-4266-bf0a-33c981c78fcd service nova] Acquiring lock "678acc61-1c94-4152-b4e8-7569ab169ab9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 624.461337] env[68217]: DEBUG oslo_concurrency.lockutils [req-32f357c7-a4f7-4dd4-b9d3-0bff82e08d0c req-09fb2ee9-2adb-4266-bf0a-33c981c78fcd service nova] Lock "678acc61-1c94-4152-b4e8-7569ab169ab9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 624.461497] env[68217]: DEBUG oslo_concurrency.lockutils [req-32f357c7-a4f7-4dd4-b9d3-0bff82e08d0c req-09fb2ee9-2adb-4266-bf0a-33c981c78fcd service nova] Lock "678acc61-1c94-4152-b4e8-7569ab169ab9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 624.461940] env[68217]: DEBUG nova.compute.manager [req-32f357c7-a4f7-4dd4-b9d3-0bff82e08d0c req-09fb2ee9-2adb-4266-bf0a-33c981c78fcd service nova] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] No waiting events found dispatching network-vif-plugged-b9eba7e7-2ecb-4c93-bf49-1c2195e23121 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 624.461940] env[68217]: WARNING nova.compute.manager [req-32f357c7-a4f7-4dd4-b9d3-0bff82e08d0c req-09fb2ee9-2adb-4266-bf0a-33c981c78fcd service nova] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Received unexpected event network-vif-plugged-b9eba7e7-2ecb-4c93-bf49-1c2195e23121 for instance with vm_state building and task_state spawning. [ 624.651530] env[68217]: DEBUG nova.network.neutron [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Successfully created port: 59d5e487-25d1-47fb-8b16-ebba73a03a4a {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 624.674710] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960701, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.675518] env[68217]: DEBUG nova.network.neutron [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 624.848653] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-298eadbd-41e8-47c6-a65f-30783100b57b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.856609] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6be9501-ddd2-43f8-8ab6-7660f7d6e041 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.893616] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d983ff18-8e0d-4fe1-acfb-ef1f12adac44 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.904522] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f56083d-7dad-48a6-a20a-73b8ccf6c5f2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.930936] env[68217]: DEBUG nova.compute.provider_tree [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 624.942328] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960702, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.007164] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "9d2b3670-ef8a-477a-b876-7a8fe37fa065" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 625.008052] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "9d2b3670-ef8a-477a-b876-7a8fe37fa065" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 625.068622] env[68217]: DEBUG nova.network.neutron [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Updating instance_info_cache with network_info: [{"id": "b9eba7e7-2ecb-4c93-bf49-1c2195e23121", "address": "fa:16:3e:f6:7b:87", "network": {"id": "a70d300c-094f-483f-bfc9-9f273664a818", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-287149275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "32d81faa5cbf43a888871c19126ea6e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "980cb890-345b-4bf8-990a-a2faec78e49c", "external-id": "nsx-vlan-transportzone-965", "segmentation_id": 965, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9eba7e7-2e", "ovs_interfaceid": "b9eba7e7-2ecb-4c93-bf49-1c2195e23121", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 625.170028] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960701, 'name': ReconfigVM_Task, 'duration_secs': 0.868514} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.170028] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Reconfigured VM instance instance-00000011 to attach disk [datastore1] 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9/480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 625.170935] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aef3d208-6290-4f64-91c1-cc1ccb570809 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.180656] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 625.180656] env[68217]: value = "task-2960703" [ 625.180656] env[68217]: _type = "Task" [ 625.180656] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.194740] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960703, 'name': Rename_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.270554] env[68217]: DEBUG nova.compute.manager [req-e0ecb603-5d65-4286-a69d-d16a3a46768a req-71399ceb-7680-4e57-b9eb-edb12e36746b service nova] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Received event network-changed-c4d2ead2-9ca2-4172-bf51-fd9673282e6b {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 625.270554] env[68217]: DEBUG nova.compute.manager [req-e0ecb603-5d65-4286-a69d-d16a3a46768a req-71399ceb-7680-4e57-b9eb-edb12e36746b service nova] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Refreshing instance network info cache due to event network-changed-c4d2ead2-9ca2-4172-bf51-fd9673282e6b. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 625.270554] env[68217]: DEBUG oslo_concurrency.lockutils [req-e0ecb603-5d65-4286-a69d-d16a3a46768a req-71399ceb-7680-4e57-b9eb-edb12e36746b service nova] Acquiring lock "refresh_cache-63e0fc9e-5182-4781-b007-69e2134718df" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.271308] env[68217]: DEBUG oslo_concurrency.lockutils [req-e0ecb603-5d65-4286-a69d-d16a3a46768a req-71399ceb-7680-4e57-b9eb-edb12e36746b service nova] Acquired lock "refresh_cache-63e0fc9e-5182-4781-b007-69e2134718df" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.271669] env[68217]: DEBUG nova.network.neutron [req-e0ecb603-5d65-4286-a69d-d16a3a46768a req-71399ceb-7680-4e57-b9eb-edb12e36746b service nova] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Refreshing network info cache for port c4d2ead2-9ca2-4172-bf51-fd9673282e6b {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 625.446072] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960702, 'name': CreateVM_Task, 'duration_secs': 0.597994} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.446786] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 625.447559] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.447803] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.448593] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 625.448873] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7311d8a-019f-4886-8840-c9947c3f66bb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.456821] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 625.456821] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52928930-4be0-cc07-8961-7242b8e09791" [ 625.456821] env[68217]: _type = "Task" [ 625.456821] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.466651] env[68217]: ERROR nova.scheduler.client.report [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [req-ddf1903d-5ddb-4492-a568-936a21b0b4f6] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ddf1903d-5ddb-4492-a568-936a21b0b4f6"}]} [ 625.474331] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52928930-4be0-cc07-8961-7242b8e09791, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.495151] env[68217]: DEBUG nova.scheduler.client.report [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 625.517237] env[68217]: DEBUG nova.scheduler.client.report [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 625.517692] env[68217]: DEBUG nova.compute.provider_tree [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 625.540889] env[68217]: DEBUG nova.scheduler.client.report [None 
req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 625.566039] env[68217]: DEBUG nova.scheduler.client.report [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 625.574684] env[68217]: DEBUG oslo_concurrency.lockutils [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Releasing lock "refresh_cache-678acc61-1c94-4152-b4e8-7569ab169ab9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.575098] env[68217]: DEBUG nova.compute.manager [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Instance network_info: |[{"id": "b9eba7e7-2ecb-4c93-bf49-1c2195e23121", "address": "fa:16:3e:f6:7b:87", "network": {"id": "a70d300c-094f-483f-bfc9-9f273664a818", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-287149275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "32d81faa5cbf43a888871c19126ea6e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "980cb890-345b-4bf8-990a-a2faec78e49c", "external-id": "nsx-vlan-transportzone-965", "segmentation_id": 965, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9eba7e7-2e", "ovs_interfaceid": "b9eba7e7-2ecb-4c93-bf49-1c2195e23121", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 625.575577] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:7b:87', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '980cb890-345b-4bf8-990a-a2faec78e49c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b9eba7e7-2ecb-4c93-bf49-1c2195e23121', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 625.586584] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None 
req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Creating folder: Project (32d81faa5cbf43a888871c19126ea6e0). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 625.586908] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9e9b6c11-9186-4283-9f27-4228fa33e341 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.603511] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Created folder: Project (32d81faa5cbf43a888871c19126ea6e0) in parent group-v594094. [ 625.603697] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Creating folder: Instances. Parent ref: group-v594141. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 625.603930] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4030120-a5ce-4483-bddd-08d32e294226 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.613819] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Created folder: Instances in parent group-v594141. [ 625.613935] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 625.614148] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 625.614365] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2bdf1949-9764-49e1-8266-ca7d2b9ec0a8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.638249] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 625.638249] env[68217]: value = "task-2960706" [ 625.638249] env[68217]: _type = "Task" [ 625.638249] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.648097] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960706, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.693750] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960703, 'name': Rename_Task, 'duration_secs': 0.288355} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.694618] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 625.694618] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3040308c-4a98-4ac3-8fbf-dfd01f757f65 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.702918] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 625.702918] env[68217]: value = "task-2960707" [ 625.702918] env[68217]: _type = "Task" [ 625.702918] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.714895] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960707, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.978458] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52928930-4be0-cc07-8961-7242b8e09791, 'name': SearchDatastore_Task, 'duration_secs': 0.011337} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.981201] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 625.981328] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 625.981551] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 625.981688] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 625.981867] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 625.984302] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-372c8035-bac1-44f5-be05-58f401f617f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.006856] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 626.006856] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Folder [datastore1] devstack-image-cache_base created. 
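Around the image cache the driver serializes work with named locks (the "Acquiring lock"/"Acquired lock"/"Releasing lock" lines for the datastore1 devstack-image-cache_base paths) before creating the cache directory and searching the datastore. A minimal sketch of that serialize-then-create-if-missing pattern with oslo.concurrency's lock context manager; the local cache path and helper name are illustrative, not the driver's own code.

```python
import os
from oslo_concurrency import lockutils

CACHE_ROOT = "/tmp/devstack-image-cache_base"   # illustrative local path, not the datastore path

def ensure_cached_image_dir(image_id):
    """Create the per-image cache directory exactly once, even when several
    concurrent builds (as in the log) race for the same base image."""
    # Lock name mirrors the "[datastore1] devstack-image-cache_base/<image>" locks above.
    with lockutils.lock(f"devstack-image-cache_base/{image_id}"):
        path = os.path.join(CACHE_ROOT, image_id)
        os.makedirs(path, exist_ok=True)        # "Creating directory ..." / "Created directory ..."
        return path
```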
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 626.006856] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e2c0c24-23ba-4150-9b1d-6b4e90c811cb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.018028] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 626.018028] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]526c7467-bace-4949-7dbe-4bbbc3b97030" [ 626.018028] env[68217]: _type = "Task" [ 626.018028] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.034479] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526c7467-bace-4949-7dbe-4bbbc3b97030, 'name': SearchDatastore_Task, 'duration_secs': 0.013031} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.035208] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f063b05-5a77-44e7-8cd3-95b1f9b35970 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.041195] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 626.041195] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5226b526-db9e-9a94-bd90-937e20b53ef9" [ 626.041195] env[68217]: _type = "Task" [ 626.041195] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.052632] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5226b526-db9e-9a94-bd90-937e20b53ef9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.151535] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960706, 'name': CreateVM_Task, 'duration_secs': 0.393083} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.151535] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 626.151535] env[68217]: DEBUG oslo_concurrency.lockutils [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.152180] env[68217]: DEBUG oslo_concurrency.lockutils [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.152180] env[68217]: DEBUG oslo_concurrency.lockutils [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 626.159179] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a748a108-69d2-4ffb-9303-74ddaa7bb551 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.164714] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Waiting for the task: (returnval){ [ 626.164714] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5272c39d-ee7a-f585-0e24-d56222389c71" [ 626.164714] env[68217]: _type = "Task" [ 626.164714] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.179098] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5272c39d-ee7a-f585-0e24-d56222389c71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.216783] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960707, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.225136] env[68217]: DEBUG nova.network.neutron [req-e0ecb603-5d65-4286-a69d-d16a3a46768a req-71399ceb-7680-4e57-b9eb-edb12e36746b service nova] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Updated VIF entry in instance network info cache for port c4d2ead2-9ca2-4172-bf51-fd9673282e6b. 
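The cached network_info updated above is a list of VIF dicts with nested network, subnet, and IP structures. A small illustrative helper that pulls the fixed IPv4 addresses out of such a structure; the sample data below is abbreviated from the entry logged above.

```python
# Abbreviated from the network_info entry logged above.
network_info = [{
    "id": "b9eba7e7-2ecb-4c93-bf49-1c2195e23121",
    "address": "fa:16:3e:f6:7b:87",
    "type": "ovs",
    "ovs_interfaceid": "b9eba7e7-2ecb-4c93-bf49-1c2195e23121",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4}],
        }],
    },
}]

def fixed_ipv4_addresses(vifs):
    """Collect the fixed IPv4 addresses from a cached network_info list."""
    addrs = []
    for vif in vifs:
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                if ip["type"] == "fixed" and ip["version"] == 4:
                    addrs.append(ip["address"])
    return addrs

print(fixed_ipv4_addresses(network_info))   # ['192.168.128.3']
```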
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 626.225136] env[68217]: DEBUG nova.network.neutron [req-e0ecb603-5d65-4286-a69d-d16a3a46768a req-71399ceb-7680-4e57-b9eb-edb12e36746b service nova] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Updating instance_info_cache with network_info: [{"id": "c4d2ead2-9ca2-4172-bf51-fd9673282e6b", "address": "fa:16:3e:27:8f:71", "network": {"id": "a43cf623-7286-46d5-a15e-6bf13296df5c", "bridge": "br-int", "label": "tempest-ListServersNegativeTestJSON-1012854120-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "734fb9662f984c239bc1648eb0033ff4", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "132fdc50-e144-4a9b-8d77-6378eec02d9b", "external-id": "nsx-vlan-transportzone-118", "segmentation_id": 118, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4d2ead2-9c", "ovs_interfaceid": "c4d2ead2-9ca2-4172-bf51-fd9673282e6b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.280730] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01016fe-150a-4413-84b6-2078e8e642f3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.288653] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-757eb2de-3fec-46c4-ad63-87a2f56d6050 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.325019] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75001544-cb73-4d4a-8f91-7adb9b05fdfe {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.335616] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d03e9c7-983c-4d40-aa0b-f6d70b8aada2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.350806] env[68217]: DEBUG nova.compute.provider_tree [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 626.554534] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 
tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5226b526-db9e-9a94-bd90-937e20b53ef9, 'name': SearchDatastore_Task, 'duration_secs': 0.013476} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.554863] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.555052] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 63e0fc9e-5182-4781-b007-69e2134718df/63e0fc9e-5182-4781-b007-69e2134718df.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 626.555535] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-afbe61ac-10d9-4ecd-a05a-350c488c4f89 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.565973] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 626.565973] env[68217]: value = "task-2960708" [ 626.565973] env[68217]: _type = "Task" [ 626.565973] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.577018] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960708, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.686498] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5272c39d-ee7a-f585-0e24-d56222389c71, 'name': SearchDatastore_Task, 'duration_secs': 0.012873} completed successfully. 
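The sequence above is the cache-then-copy flow: the cached base image `<image>.vmdk` under devstack-image-cache_base is copied to a per-instance path `<uuid>/<uuid>.vmdk` before the disk is attached. A minimal local sketch of that flow using plain file copies; `fetch_image_to_cache` is a hypothetical stand-in for the Glance download that would happen on a cache miss, and the paths are illustrative rather than datastore paths.

```python
import os
import shutil

def fetch_image_to_cache(image_id, cache_dir):
    """Hypothetical stand-in for downloading the base image on a cache miss."""
    raise NotImplementedError

def prepare_instance_disk(image_id, instance_uuid, cache_dir, instances_dir):
    """Copy the cached base vmdk to the instance's own directory, mirroring
    'Copying Virtual Disk ... devstack-image-cache_base/<image>.vmdk to
    [datastore1] <uuid>/<uuid>.vmdk' in the log."""
    cached = os.path.join(cache_dir, image_id, f"{image_id}.vmdk")
    if not os.path.exists(cached):
        fetch_image_to_cache(image_id, cache_dir)
    target_dir = os.path.join(instances_dir, instance_uuid)
    os.makedirs(target_dir, exist_ok=True)
    target = os.path.join(target_dir, f"{instance_uuid}.vmdk")
    shutil.copyfile(cached, target)             # local equivalent of CopyVirtualDisk_Task
    return target
```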
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.686498] env[68217]: DEBUG oslo_concurrency.lockutils [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.686498] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 626.686498] env[68217]: DEBUG oslo_concurrency.lockutils [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.686654] env[68217]: DEBUG oslo_concurrency.lockutils [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 626.686654] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 626.686654] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d2b5604-b21e-464a-8f4d-71da607cf02a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.695691] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 626.695691] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 626.696314] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4ba46cd-2608-4b61-b695-9775447ff515 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.705343] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Waiting for the task: (returnval){ [ 626.705343] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a27e47-49fe-c739-b107-78d528a6e256" [ 626.705343] env[68217]: _type = "Task" [ 626.705343] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.717749] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "38787c7d-a9cf-4ce6-a112-c1ec259697ca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.721020] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "38787c7d-a9cf-4ce6-a112-c1ec259697ca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.724287] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a27e47-49fe-c739-b107-78d528a6e256, 'name': SearchDatastore_Task, 'duration_secs': 0.011466} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.725693] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-887ef453-415f-499e-9ec4-08ff4d2f1adf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.732347] env[68217]: DEBUG oslo_concurrency.lockutils [req-e0ecb603-5d65-4286-a69d-d16a3a46768a req-71399ceb-7680-4e57-b9eb-edb12e36746b service nova] Releasing lock "refresh_cache-63e0fc9e-5182-4781-b007-69e2134718df" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 626.732925] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960707, 'name': PowerOnVM_Task, 'duration_secs': 0.855634} completed successfully. 
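Once PowerOnVM_Task succeeds, the manager re-reads the instance power state ("Checking state") to confirm the VM is actually running before reporting the spawn time. A small illustrative mapping from vSphere runtime power states to Nova's power_state constants; treat the exact table as an assumption for illustration rather than the driver's literal mapping.

```python
from nova.compute import power_state

# Illustrative mapping of vSphere runtime.powerState values to Nova power states.
VMWARE_POWER_STATES = {
    "poweredOn": power_state.RUNNING,
    "poweredOff": power_state.SHUTDOWN,
    "suspended": power_state.SUSPENDED,
}

def nova_power_state(vsphere_state):
    """Translate a vSphere power state string into a Nova power_state constant."""
    return VMWARE_POWER_STATES.get(vsphere_state, power_state.NOSTATE)
```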
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.733745] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 626.734085] env[68217]: INFO nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Took 11.81 seconds to spawn the instance on the hypervisor. [ 626.734384] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 626.735279] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98fe3d03-2054-4cf5-b077-fee1f5ac46b9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.739515] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Waiting for the task: (returnval){ [ 626.739515] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]525d38d1-0c20-8e94-bd7b-b45c2e17db69" [ 626.739515] env[68217]: _type = "Task" [ 626.739515] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.753482] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]525d38d1-0c20-8e94-bd7b-b45c2e17db69, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.878405] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "93b49e91-5e9a-4b11-a833-31ab0883e0e8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.878405] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "93b49e91-5e9a-4b11-a833-31ab0883e0e8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.878405] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "93b49e91-5e9a-4b11-a833-31ab0883e0e8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 626.878405] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "93b49e91-5e9a-4b11-a833-31ab0883e0e8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 626.878770] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "93b49e91-5e9a-4b11-a833-31ab0883e0e8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 626.884081] env[68217]: INFO nova.compute.manager [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Terminating instance [ 626.889209] env[68217]: ERROR nova.scheduler.client.report [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [req-a18a5740-bcde-497d-95e4-4b7c70a2dd63] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-a18a5740-bcde-497d-95e4-4b7c70a2dd63"}]} [ 626.918766] env[68217]: DEBUG nova.scheduler.client.report [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 626.951848] env[68217]: DEBUG nova.scheduler.client.report [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 626.953192] env[68217]: DEBUG nova.compute.provider_tree [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 626.967473] env[68217]: DEBUG nova.scheduler.client.report [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 626.997644] env[68217]: DEBUG nova.scheduler.client.report [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 627.078762] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960708, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.195274] env[68217]: DEBUG nova.network.neutron [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Successfully updated port: 59d5e487-25d1-47fb-8b16-ebba73a03a4a {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 627.258925] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]525d38d1-0c20-8e94-bd7b-b45c2e17db69, 'name': SearchDatastore_Task, 'duration_secs': 0.074517} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.264530] env[68217]: DEBUG oslo_concurrency.lockutils [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 627.264530] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 678acc61-1c94-4152-b4e8-7569ab169ab9/678acc61-1c94-4152-b4e8-7569ab169ab9.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 627.264993] env[68217]: INFO nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Took 37.47 seconds to build instance. [ 627.268627] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3961b9d5-2c40-486c-9bb4-de7af80527c3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.274202] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Waiting for the task: (returnval){ [ 627.274202] env[68217]: value = "task-2960709" [ 627.274202] env[68217]: _type = "Task" [ 627.274202] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.294890] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': task-2960709, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.396016] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "refresh_cache-93b49e91-5e9a-4b11-a833-31ab0883e0e8" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.397482] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquired lock "refresh_cache-93b49e91-5e9a-4b11-a833-31ab0883e0e8" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.397693] env[68217]: DEBUG nova.network.neutron [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 627.578118] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960708, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.527985} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.581911] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 63e0fc9e-5182-4781-b007-69e2134718df/63e0fc9e-5182-4781-b007-69e2134718df.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 627.582164] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 627.582643] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7e59fa99-76bd-48b3-9b4d-4c097c8a9848 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.594364] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 627.594364] env[68217]: value = "task-2960710" [ 627.594364] env[68217]: _type = "Task" [ 627.594364] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.610596] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960710, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.691849] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f54e981-aa9a-4829-8714-3859521f8e0c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.702866] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquiring lock "refresh_cache-83d32dd6-2629-4451-a746-bf5270083e2a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.703026] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquired lock "refresh_cache-83d32dd6-2629-4451-a746-bf5270083e2a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.703175] env[68217]: DEBUG nova.network.neutron [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 627.707848] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d359dec9-8cc1-41a6-a349-2aa419327c14 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.742100] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b72e57-5332-4458-9239-e8dc7f033b55 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.749480] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "3d03e0b7-0469-4041-a7d5-7768326eb3b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 627.749759] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "3d03e0b7-0469-4041-a7d5-7768326eb3b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 627.753639] env[68217]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14abcd2b-6a0d-4e4f-86e9-223a9c51f21d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.769421] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.462s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 627.769712] env[68217]: DEBUG nova.compute.provider_tree [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 627.783075] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': task-2960709, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.464239} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.783075] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 678acc61-1c94-4152-b4e8-7569ab169ab9/678acc61-1c94-4152-b4e8-7569ab169ab9.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 627.783338] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 627.783506] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b61577cb-6d44-49bd-8cf7-dc14a0da00a1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.793597] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Waiting for the task: (returnval){ [ 627.793597] env[68217]: value = "task-2960711" [ 627.793597] env[68217]: _type = "Task" [ 627.793597] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.807270] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': task-2960711, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 627.817490] env[68217]: DEBUG nova.compute.manager [req-6a796407-80a2-44f1-bca0-f6dfa6734d4b req-44969d5d-c3f8-49d0-bde0-5194e82e1735 service nova] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Received event network-changed-b9eba7e7-2ecb-4c93-bf49-1c2195e23121 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 627.817490] env[68217]: DEBUG nova.compute.manager [req-6a796407-80a2-44f1-bca0-f6dfa6734d4b req-44969d5d-c3f8-49d0-bde0-5194e82e1735 service nova] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Refreshing instance network info cache due to event network-changed-b9eba7e7-2ecb-4c93-bf49-1c2195e23121. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 627.817490] env[68217]: DEBUG oslo_concurrency.lockutils [req-6a796407-80a2-44f1-bca0-f6dfa6734d4b req-44969d5d-c3f8-49d0-bde0-5194e82e1735 service nova] Acquiring lock "refresh_cache-678acc61-1c94-4152-b4e8-7569ab169ab9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.817490] env[68217]: DEBUG oslo_concurrency.lockutils [req-6a796407-80a2-44f1-bca0-f6dfa6734d4b req-44969d5d-c3f8-49d0-bde0-5194e82e1735 service nova] Acquired lock "refresh_cache-678acc61-1c94-4152-b4e8-7569ab169ab9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 627.817945] env[68217]: DEBUG nova.network.neutron [req-6a796407-80a2-44f1-bca0-f6dfa6734d4b req-44969d5d-c3f8-49d0-bde0-5194e82e1735 service nova] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Refreshing network info cache for port b9eba7e7-2ecb-4c93-bf49-1c2195e23121 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 627.929796] env[68217]: DEBUG nova.network.neutron [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 628.000277] env[68217]: DEBUG nova.network.neutron [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.111294] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960710, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.105129} completed successfully. 
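The "Extending root virtual disk to 1048576" value a few entries back is the requested size expressed in KiB, which is consistent with a 1 GiB flavor root disk converted before ExtendVirtualDisk_Task is invoked (1 GiB = 1024 * 1024 KiB). A one-line check of that conversion; the helper name is illustrative.

```python
def root_gb_to_kib(root_gb):
    """Convert a flavor root_gb value to the KiB figure seen in the extend-disk log line."""
    return root_gb * 1024 * 1024

assert root_gb_to_kib(1) == 1048576   # matches "Extending root virtual disk to 1048576"
```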
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.111294] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 628.111604] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-122b4606-e32c-4155-a3f9-af65fc05a789 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.133924] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Reconfiguring VM instance instance-00000012 to attach disk [datastore1] 63e0fc9e-5182-4781-b007-69e2134718df/63e0fc9e-5182-4781-b007-69e2134718df.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 628.134238] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3103951-1a86-41b5-89c6-36f8ca5111a8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.154619] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 628.154619] env[68217]: value = "task-2960712" [ 628.154619] env[68217]: _type = "Task" [ 628.154619] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.164376] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960712, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.193082] env[68217]: DEBUG nova.compute.manager [req-e147a832-f7d1-4ac3-8f9c-f9191987ca34 req-d5a6c1d7-dc48-4482-a1e1-774274ae03b6 service nova] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Received event network-vif-plugged-59d5e487-25d1-47fb-8b16-ebba73a03a4a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 628.193435] env[68217]: DEBUG oslo_concurrency.lockutils [req-e147a832-f7d1-4ac3-8f9c-f9191987ca34 req-d5a6c1d7-dc48-4482-a1e1-774274ae03b6 service nova] Acquiring lock "83d32dd6-2629-4451-a746-bf5270083e2a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.193760] env[68217]: DEBUG oslo_concurrency.lockutils [req-e147a832-f7d1-4ac3-8f9c-f9191987ca34 req-d5a6c1d7-dc48-4482-a1e1-774274ae03b6 service nova] Lock "83d32dd6-2629-4451-a746-bf5270083e2a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.193974] env[68217]: DEBUG oslo_concurrency.lockutils [req-e147a832-f7d1-4ac3-8f9c-f9191987ca34 req-d5a6c1d7-dc48-4482-a1e1-774274ae03b6 service nova] Lock "83d32dd6-2629-4451-a746-bf5270083e2a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.194218] env[68217]: DEBUG nova.compute.manager [req-e147a832-f7d1-4ac3-8f9c-f9191987ca34 req-d5a6c1d7-dc48-4482-a1e1-774274ae03b6 service nova] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] No waiting events found dispatching network-vif-plugged-59d5e487-25d1-47fb-8b16-ebba73a03a4a {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 628.194444] env[68217]: WARNING nova.compute.manager [req-e147a832-f7d1-4ac3-8f9c-f9191987ca34 req-d5a6c1d7-dc48-4482-a1e1-774274ae03b6 service nova] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Received unexpected event network-vif-plugged-59d5e487-25d1-47fb-8b16-ebba73a03a4a for instance with vm_state building and task_state spawning. [ 628.194782] env[68217]: DEBUG nova.compute.manager [req-e147a832-f7d1-4ac3-8f9c-f9191987ca34 req-d5a6c1d7-dc48-4482-a1e1-774274ae03b6 service nova] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Received event network-changed-59d5e487-25d1-47fb-8b16-ebba73a03a4a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 628.194857] env[68217]: DEBUG nova.compute.manager [req-e147a832-f7d1-4ac3-8f9c-f9191987ca34 req-d5a6c1d7-dc48-4482-a1e1-774274ae03b6 service nova] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Refreshing instance network info cache due to event network-changed-59d5e487-25d1-47fb-8b16-ebba73a03a4a. 
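The network-vif-plugged / network-changed entries above show Neutron notifying Nova through external instance events: the manager pops a per-instance, per-event waiter, and when nobody registered interest it just logs it, which is the "No waiting events found dispatching ..." and "Received unexpected event" case. A minimal sketch of that wait/pop mechanism using threading events; the class and method names are illustrative, not the ComputeManager API.

```python
import threading
from collections import defaultdict

class InstanceEventWaiter:
    """Illustrative waiter keyed by instance UUID and an event key like
    'network-vif-plugged-<port-id>'."""

    def __init__(self):
        self._lock = threading.Lock()
        self._events = defaultdict(dict)     # instance_uuid -> {event_key: threading.Event}

    def prepare(self, instance_uuid, event_key):
        """Register interest before triggering the operation (e.g. plugging a VIF)."""
        ev = threading.Event()
        with self._lock:
            self._events[instance_uuid][event_key] = ev
        return ev

    def pop(self, instance_uuid, event_key):
        """Called when the external event arrives; returns None if nobody was waiting,
        the 'No waiting events found dispatching ...' case in the log."""
        with self._lock:
            ev = self._events.get(instance_uuid, {}).pop(event_key, None)
        if ev is not None:
            ev.set()
        return ev
```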
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 628.195092] env[68217]: DEBUG oslo_concurrency.lockutils [req-e147a832-f7d1-4ac3-8f9c-f9191987ca34 req-d5a6c1d7-dc48-4482-a1e1-774274ae03b6 service nova] Acquiring lock "refresh_cache-83d32dd6-2629-4451-a746-bf5270083e2a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 628.273672] env[68217]: DEBUG nova.network.neutron [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 628.278509] env[68217]: DEBUG nova.compute.manager [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 628.300184] env[68217]: ERROR nova.scheduler.client.report [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [req-ad98cc7c-6dad-46e5-b691-789e1460f61b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ad98cc7c-6dad-46e5-b691-789e1460f61b"}]} [ 628.301268] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 5.967s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.304457] env[68217]: ERROR nova.compute.manager [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Setting instance vm_state to ERROR: nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 (generation 46): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ad98cc7c-6dad-46e5-b691-789e1460f61b"}]} [ 628.304457] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Traceback (most recent call last): [ 628.304457] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 628.304457] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] self._delete_instance(context, instance, bdms) [ 628.304457] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 628.304457] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] self._complete_deletion(context, instance) [ 628.304457] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] File "/opt/stack/nova/nova/compute/manager.py", line 930, in _complete_deletion [ 628.304457] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] self._update_resource_tracker(context, instance) [ 628.304457] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] File "/opt/stack/nova/nova/compute/manager.py", line 696, in _update_resource_tracker [ 628.305200] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] self.rt.update_usage(context, instance, instance.node) [ 628.305200] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 628.305200] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] return f(*args, **kwargs) [ 628.305200] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 702, in update_usage [ 628.305200] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] 
self._update(context.elevated(), self.compute_nodes[nodename]) [ 628.305200] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1375, in _update [ 628.305200] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] self._update_to_placement(context, compute_node, startup) [ 628.305200] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 628.305200] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 628.305200] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 266, in call [ 628.305200] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] raise attempt.get() [ 628.305200] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 628.305763] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] six.reraise(self.value[0], self.value[1], self.value[2]) [ 628.305763] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 628.305763] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] raise value [ 628.305763] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 628.305763] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 628.305763] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1360, in _update_to_placement [ 628.305763] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] self.reportclient.update_from_provider_tree( [ 628.305763] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 628.305763] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] self.set_inventory_for_provider( [ 628.305763] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] File "/opt/stack/nova/nova/scheduler/client/report.py", line 1007, in set_inventory_for_provider [ 628.305763] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] raise exception.ResourceProviderUpdateConflict( [ 628.307492] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 (generation 46): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": 
"placement.concurrent_update", "request_id": "req-ad98cc7c-6dad-46e5-b691-789e1460f61b"}]} [ 628.307492] env[68217]: ERROR nova.compute.manager [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] [ 628.307492] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.614s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 628.308173] env[68217]: INFO nova.compute.claims [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 628.315655] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': task-2960711, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.29059} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.318426] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 628.319370] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a630369-917c-4753-bce1-f1820bad4913 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.347189] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Reconfiguring VM instance instance-00000013 to attach disk [datastore1] 678acc61-1c94-4152-b4e8-7569ab169ab9/678acc61-1c94-4152-b4e8-7569ab169ab9.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 628.348871] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c83535d-562a-41b3-9dd6-6411926c9fad {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.372468] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Waiting for the task: (returnval){ [ 628.372468] env[68217]: value = "task-2960713" [ 628.372468] env[68217]: _type = "Task" [ 628.372468] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.380755] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': task-2960713, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.503443] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Releasing lock "refresh_cache-93b49e91-5e9a-4b11-a833-31ab0883e0e8" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 628.503695] env[68217]: DEBUG nova.compute.manager [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 628.504239] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 628.504793] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4b3caf-05c8-4112-b6a7-c0ee8f1d6229 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.519370] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 628.519658] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c21b008a-bddc-4de3-91ad-69089da18b04 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.530328] env[68217]: DEBUG oslo_vmware.api [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 628.530328] env[68217]: value = "task-2960714" [ 628.530328] env[68217]: _type = "Task" [ 628.530328] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.542312] env[68217]: DEBUG oslo_vmware.api [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960714, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.650007] env[68217]: DEBUG nova.network.neutron [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Updating instance_info_cache with network_info: [{"id": "59d5e487-25d1-47fb-8b16-ebba73a03a4a", "address": "fa:16:3e:54:1e:2f", "network": {"id": "1bc7f0b6-9537-490a-8acf-d4b9bee78802", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1016751512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c9130982204910a139a45ddad542c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "42f08482-a1da-405d-9918-d733d9f5173c", "external-id": "nsx-vlan-transportzone-381", "segmentation_id": 381, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59d5e487-25", "ovs_interfaceid": "59d5e487-25d1-47fb-8b16-ebba73a03a4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.677172] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960712, 'name': ReconfigVM_Task, 'duration_secs': 0.322319} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.677444] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Reconfigured VM instance instance-00000012 to attach disk [datastore1] 63e0fc9e-5182-4781-b007-69e2134718df/63e0fc9e-5182-4781-b007-69e2134718df.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 628.678086] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f517215f-86d0-4b60-9a6d-1e96e852453e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.685108] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 628.685108] env[68217]: value = "task-2960715" [ 628.685108] env[68217]: _type = "Task" [ 628.685108] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.694623] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960715, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.806787] env[68217]: DEBUG nova.network.neutron [req-6a796407-80a2-44f1-bca0-f6dfa6734d4b req-44969d5d-c3f8-49d0-bde0-5194e82e1735 service nova] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Updated VIF entry in instance network info cache for port b9eba7e7-2ecb-4c93-bf49-1c2195e23121. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 628.807207] env[68217]: DEBUG nova.network.neutron [req-6a796407-80a2-44f1-bca0-f6dfa6734d4b req-44969d5d-c3f8-49d0-bde0-5194e82e1735 service nova] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Updating instance_info_cache with network_info: [{"id": "b9eba7e7-2ecb-4c93-bf49-1c2195e23121", "address": "fa:16:3e:f6:7b:87", "network": {"id": "a70d300c-094f-483f-bfc9-9f273664a818", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-287149275-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "32d81faa5cbf43a888871c19126ea6e0", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "980cb890-345b-4bf8-990a-a2faec78e49c", "external-id": "nsx-vlan-transportzone-965", "segmentation_id": 965, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9eba7e7-2e", "ovs_interfaceid": "b9eba7e7-2ecb-4c93-bf49-1c2195e23121", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.813586] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Lock "7056fb29-2a2f-4275-a411-4d5f3fcb421f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 37.924s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 628.819808] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 628.883113] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': task-2960713, 'name': ReconfigVM_Task, 'duration_secs': 0.298349} completed 
successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.883560] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Reconfigured VM instance instance-00000013 to attach disk [datastore1] 678acc61-1c94-4152-b4e8-7569ab169ab9/678acc61-1c94-4152-b4e8-7569ab169ab9.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 628.884898] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-74d7b102-24b3-4761-a7e9-4cbb9c421116 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.893579] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Waiting for the task: (returnval){ [ 628.893579] env[68217]: value = "task-2960716" [ 628.893579] env[68217]: _type = "Task" [ 628.893579] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.904764] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': task-2960716, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.042458] env[68217]: DEBUG oslo_vmware.api [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960714, 'name': PowerOffVM_Task, 'duration_secs': 0.323487} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.043010] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 629.043010] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 629.043175] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-52e7539b-f068-4a3c-a420-c848dafd172e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.073300] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 629.073300] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 629.073300] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Deleting the datastore file [datastore1] 93b49e91-5e9a-4b11-a833-31ab0883e0e8 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 629.073300] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca8cec1e-6df0-407d-8a26-6e1b2b12e074 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.082158] env[68217]: DEBUG oslo_vmware.api [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 629.082158] env[68217]: value = "task-2960718" [ 629.082158] env[68217]: _type = "Task" [ 629.082158] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.098199] env[68217]: DEBUG oslo_vmware.api [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960718, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.154272] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Releasing lock "refresh_cache-83d32dd6-2629-4451-a746-bf5270083e2a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 629.154272] env[68217]: DEBUG nova.compute.manager [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Instance network_info: |[{"id": "59d5e487-25d1-47fb-8b16-ebba73a03a4a", "address": "fa:16:3e:54:1e:2f", "network": {"id": "1bc7f0b6-9537-490a-8acf-d4b9bee78802", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1016751512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c9130982204910a139a45ddad542c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "42f08482-a1da-405d-9918-d733d9f5173c", "external-id": "nsx-vlan-transportzone-381", "segmentation_id": 381, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59d5e487-25", "ovs_interfaceid": "59d5e487-25d1-47fb-8b16-ebba73a03a4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 629.154615] env[68217]: DEBUG oslo_concurrency.lockutils [req-e147a832-f7d1-4ac3-8f9c-f9191987ca34 req-d5a6c1d7-dc48-4482-a1e1-774274ae03b6 service nova] Acquired lock "refresh_cache-83d32dd6-2629-4451-a746-bf5270083e2a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.154615] env[68217]: DEBUG nova.network.neutron [req-e147a832-f7d1-4ac3-8f9c-f9191987ca34 req-d5a6c1d7-dc48-4482-a1e1-774274ae03b6 service nova] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Refreshing network info cache for port 59d5e487-25d1-47fb-8b16-ebba73a03a4a {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 629.156781] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:54:1e:2f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '42f08482-a1da-405d-9918-d733d9f5173c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '59d5e487-25d1-47fb-8b16-ebba73a03a4a', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 629.165713] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da 
tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Creating folder: Project (36c9130982204910a139a45ddad542c3). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 629.167078] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7812fd18-abdc-4f16-9108-adc3dd6732be {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.182811] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Created folder: Project (36c9130982204910a139a45ddad542c3) in parent group-v594094. [ 629.183538] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Creating folder: Instances. Parent ref: group-v594144. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 629.184462] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-420bfb76-ca39-4acd-bf96-7abfd161cf4e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.203141] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960715, 'name': Rename_Task, 'duration_secs': 0.148919} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.207942] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 629.212222] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Created folder: Instances in parent group-v594144. [ 629.212222] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 629.212222] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2d50301d-f5c3-4fe4-9488-18412e6d4ec9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.213669] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 629.214711] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ae9984f-de92-4472-964e-dea52a28c52e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.239579] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 629.239579] env[68217]: value = "task-2960721" [ 629.239579] env[68217]: _type = "Task" [ 629.239579] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.242675] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 629.242675] env[68217]: value = "task-2960722" [ 629.242675] env[68217]: _type = "Task" [ 629.242675] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.252975] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960721, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.260544] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960722, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.310684] env[68217]: DEBUG oslo_concurrency.lockutils [req-6a796407-80a2-44f1-bca0-f6dfa6734d4b req-44969d5d-c3f8-49d0-bde0-5194e82e1735 service nova] Releasing lock "refresh_cache-678acc61-1c94-4152-b4e8-7569ab169ab9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 629.344522] env[68217]: DEBUG nova.scheduler.client.report [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 629.367269] env[68217]: DEBUG nova.scheduler.client.report [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 629.367269] env[68217]: DEBUG nova.compute.provider_tree [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 629.391029] env[68217]: DEBUG nova.scheduler.client.report [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 629.410736] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': task-2960716, 'name': Rename_Task, 'duration_secs': 0.150711} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.412639] env[68217]: DEBUG nova.scheduler.client.report [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 629.416071] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 629.420427] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68f7a9f8-ff1b-4809-891d-c89740a4d73a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.426801] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Waiting for the task: (returnval){ [ 629.426801] env[68217]: value = "task-2960723" [ 629.426801] env[68217]: _type = "Task" [ 629.426801] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.435349] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': task-2960723, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.595709] env[68217]: DEBUG oslo_vmware.api [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960718, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.105982} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.596224] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 629.596620] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 629.596990] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 629.598035] env[68217]: INFO nova.compute.manager [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Took 1.09 seconds to destroy the instance on the hypervisor. [ 629.598035] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 629.600721] env[68217]: DEBUG nova.compute.manager [-] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 629.600721] env[68217]: DEBUG nova.network.neutron [-] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 629.628558] env[68217]: DEBUG nova.network.neutron [-] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 629.755247] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960722, 'name': CreateVM_Task, 'duration_secs': 0.360158} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.758961] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 629.758961] env[68217]: DEBUG oslo_vmware.api [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960721, 'name': PowerOnVM_Task, 'duration_secs': 0.492418} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.762142] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.762142] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.762505] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 629.762606] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 629.763052] env[68217]: INFO nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Took 10.81 seconds to spawn the instance on the hypervisor. [ 629.763285] env[68217]: DEBUG nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 629.763689] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0b70c32-0b1f-4338-8473-0f87066c2184 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.765886] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-559d357f-4b3c-4839-b549-74e8e2ff319f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.781980] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 629.781980] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52019b81-69b0-fb23-e2c5-6a80fdf401a1" [ 629.781980] env[68217]: _type = "Task" [ 629.781980] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.792157] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52019b81-69b0-fb23-e2c5-6a80fdf401a1, 'name': SearchDatastore_Task, 'duration_secs': 0.01013} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.792605] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 629.792940] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 629.793175] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.795870] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 629.795870] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 629.795870] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7a4b435-1091-491c-b4ec-80200db7d07e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.804610] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 629.804610] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 
tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 629.807667] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e68b85a9-a1bc-4199-8386-af8ec1f82dcb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.815744] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 629.815744] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52aee139-89aa-f54c-ec2c-b61589b66ab1" [ 629.815744] env[68217]: _type = "Task" [ 629.815744] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.835514] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52aee139-89aa-f54c-ec2c-b61589b66ab1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.942626] env[68217]: DEBUG oslo_vmware.api [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': task-2960723, 'name': PowerOnVM_Task, 'duration_secs': 0.498783} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.945694] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 629.947023] env[68217]: INFO nova.compute.manager [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Took 8.26 seconds to spawn the instance on the hypervisor. 
[ 629.948306] env[68217]: DEBUG nova.compute.manager [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 629.949478] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f85d834-a176-40e2-a567-6faf2e026eaf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.135623] env[68217]: DEBUG nova.network.neutron [-] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.155957] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7044af6d-5df7-4a10-ba6b-60fb35f259dd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.169028] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a6bf9d6-c9fb-40d1-ba1d-30fd40bfca5c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.209224] env[68217]: DEBUG nova.network.neutron [req-e147a832-f7d1-4ac3-8f9c-f9191987ca34 req-d5a6c1d7-dc48-4482-a1e1-774274ae03b6 service nova] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Updated VIF entry in instance network info cache for port 59d5e487-25d1-47fb-8b16-ebba73a03a4a. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 630.209224] env[68217]: DEBUG nova.network.neutron [req-e147a832-f7d1-4ac3-8f9c-f9191987ca34 req-d5a6c1d7-dc48-4482-a1e1-774274ae03b6 service nova] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Updating instance_info_cache with network_info: [{"id": "59d5e487-25d1-47fb-8b16-ebba73a03a4a", "address": "fa:16:3e:54:1e:2f", "network": {"id": "1bc7f0b6-9537-490a-8acf-d4b9bee78802", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1016751512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c9130982204910a139a45ddad542c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "42f08482-a1da-405d-9918-d733d9f5173c", "external-id": "nsx-vlan-transportzone-381", "segmentation_id": 381, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59d5e487-25", "ovs_interfaceid": "59d5e487-25d1-47fb-8b16-ebba73a03a4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.212471] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59b15c6c-d34d-438e-9637-415c7dbc59fe {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.223159] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f659d2-2883-47ef-bb29-d0ce76810200 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.244143] env[68217]: DEBUG nova.compute.provider_tree [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 630.291111] env[68217]: INFO nova.compute.manager [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Took 39.39 seconds to build instance. [ 630.330188] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52aee139-89aa-f54c-ec2c-b61589b66ab1, 'name': SearchDatastore_Task, 'duration_secs': 0.01417} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.331008] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-95ea9784-e799-473b-b74e-886fddd13172 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.337102] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 630.337102] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52550030-ab0f-427f-3cfd-b6412b511c3e" [ 630.337102] env[68217]: _type = "Task" [ 630.337102] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.341819] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 630.347392] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52550030-ab0f-427f-3cfd-b6412b511c3e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.476175] env[68217]: INFO nova.compute.manager [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Took 38.56 seconds to build instance. [ 630.641253] env[68217]: INFO nova.compute.manager [-] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Took 1.04 seconds to deallocate network for instance. [ 630.714855] env[68217]: DEBUG oslo_concurrency.lockutils [req-e147a832-f7d1-4ac3-8f9c-f9191987ca34 req-d5a6c1d7-dc48-4482-a1e1-774274ae03b6 service nova] Releasing lock "refresh_cache-83d32dd6-2629-4451-a746-bf5270083e2a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.775458] env[68217]: ERROR nova.scheduler.client.report [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [req-c7a4c73e-d7c9-4240-b9d9-2836ffed2008] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c7a4c73e-d7c9-4240-b9d9-2836ffed2008"}]} [ 630.796767] env[68217]: DEBUG nova.scheduler.client.report [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 630.799150] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1527248-2959-4f82-b7ab-25acda1d21a3 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "63e0fc9e-5182-4781-b007-69e2134718df" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.446s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 630.823694] env[68217]: DEBUG nova.scheduler.client.report [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 630.823918] 
env[68217]: DEBUG nova.compute.provider_tree [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 630.838581] env[68217]: DEBUG nova.scheduler.client.report [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 630.852080] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52550030-ab0f-427f-3cfd-b6412b511c3e, 'name': SearchDatastore_Task, 'duration_secs': 0.026879} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.852248] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 630.852568] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 83d32dd6-2629-4451-a746-bf5270083e2a/83d32dd6-2629-4451-a746-bf5270083e2a.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 630.853289] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fe62a341-a115-4ce1-b0be-dc79a536321f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.860093] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 630.860093] env[68217]: value = "task-2960728" [ 630.860093] env[68217]: _type = "Task" [ 630.860093] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.866742] env[68217]: DEBUG nova.scheduler.client.report [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 630.874517] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2960728, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.978374] env[68217]: DEBUG oslo_concurrency.lockutils [None req-827836a7-0e1e-4a37-8fff-5b2c20e67df1 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Lock "678acc61-1c94-4152-b4e8-7569ab169ab9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.424s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.032414] env[68217]: DEBUG oslo_concurrency.lockutils [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "580e6909-7d05-447a-a378-f0b8b71f059a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.032634] env[68217]: DEBUG oslo_concurrency.lockutils [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "580e6909-7d05-447a-a378-f0b8b71f059a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.148908] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.308121] env[68217]: DEBUG nova.compute.manager [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 631.374816] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2960728, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.482058] env[68217]: DEBUG nova.compute.manager [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 631.488072] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ac9579a-519a-4aff-a97c-1e9a98e2e795 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.496209] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a71dedd-e0f5-45ac-9612-b367f782270a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.536485] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd20d011-33ba-4579-a08e-7096787e09d2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.547748] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2104b105-e48c-42d0-bd74-1fd9dc5c2f75 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.565442] env[68217]: DEBUG nova.compute.provider_tree [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 631.831257] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.875810] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2960728, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.908815} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.877147] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 83d32dd6-2629-4451-a746-bf5270083e2a/83d32dd6-2629-4451-a746-bf5270083e2a.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 631.877440] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 631.877704] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3576158f-082a-49bf-8b83-b7135cf4fab2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.885033] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.885033] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.885130] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.885262] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.885425] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" 
:: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.887181] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 631.887181] env[68217]: value = "task-2960729" [ 631.887181] env[68217]: _type = "Task" [ 631.887181] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 631.887763] env[68217]: INFO nova.compute.manager [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Terminating instance [ 631.900804] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2960729, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.946697] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Acquiring lock "678acc61-1c94-4152-b4e8-7569ab169ab9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.946882] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Lock "678acc61-1c94-4152-b4e8-7569ab169ab9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.947286] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Acquiring lock "678acc61-1c94-4152-b4e8-7569ab169ab9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 631.947362] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Lock "678acc61-1c94-4152-b4e8-7569ab169ab9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 631.947973] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Lock "678acc61-1c94-4152-b4e8-7569ab169ab9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 631.950699] env[68217]: INFO nova.compute.manager [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Terminating instance [ 632.002732] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.108493] env[68217]: DEBUG nova.scheduler.client.report [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 48 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 632.108757] env[68217]: DEBUG nova.compute.provider_tree [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 48 to 49 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 632.108940] env[68217]: DEBUG nova.compute.provider_tree [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 632.398471] env[68217]: DEBUG nova.compute.manager [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 632.398875] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 632.399279] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2960729, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.303444} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.400114] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2632ff2-93a2-44d9-be40-771737191b0a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.404102] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 632.406657] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1408c29-21a2-453b-97e6-1d826ced4d3d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.431708] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Reconfiguring VM instance instance-00000014 to attach disk [datastore1] 83d32dd6-2629-4451-a746-bf5270083e2a/83d32dd6-2629-4451-a746-bf5270083e2a.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 632.434332] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3d9d22aa-745e-4e81-a05c-48c2dff17db6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.451300] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 632.451666] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ee264de7-44c0-41c3-bbbf-4c926dc80de2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.455664] env[68217]: DEBUG nova.compute.manager [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 632.455664] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 632.456635] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b854b326-d378-4ab4-aa0a-5d87c43550b7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.462357] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 632.462357] env[68217]: value = "task-2960730" [ 632.462357] env[68217]: _type = "Task" [ 632.462357] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.472858] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 632.472858] env[68217]: DEBUG oslo_vmware.api [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 632.472858] env[68217]: value = "task-2960731" [ 632.472858] env[68217]: _type = "Task" [ 632.472858] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.472858] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52823fcf-491b-48b1-99c5-39ac3034c5c6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.486136] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2960730, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.493329] env[68217]: DEBUG oslo_vmware.api [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960731, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.494919] env[68217]: DEBUG oslo_vmware.api [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Waiting for the task: (returnval){ [ 632.494919] env[68217]: value = "task-2960732" [ 632.494919] env[68217]: _type = "Task" [ 632.494919] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.506888] env[68217]: DEBUG oslo_vmware.api [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': task-2960732, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 632.616739] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 4.310s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.617358] env[68217]: DEBUG nova.compute.manager [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 632.628731] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.272s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.630756] env[68217]: INFO nova.compute.claims [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 632.696983] env[68217]: DEBUG oslo_concurrency.lockutils [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "14c8e8e6-5d7f-45b4-8a84-d5951c38573f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.697502] env[68217]: DEBUG oslo_concurrency.lockutils [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "14c8e8e6-5d7f-45b4-8a84-d5951c38573f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.697740] env[68217]: DEBUG oslo_concurrency.lockutils [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "14c8e8e6-5d7f-45b4-8a84-d5951c38573f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 632.698029] env[68217]: DEBUG oslo_concurrency.lockutils [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed 
tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "14c8e8e6-5d7f-45b4-8a84-d5951c38573f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 632.698164] env[68217]: DEBUG oslo_concurrency.lockutils [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "14c8e8e6-5d7f-45b4-8a84-d5951c38573f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 632.700583] env[68217]: INFO nova.compute.manager [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Terminating instance [ 632.974352] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2960730, 'name': ReconfigVM_Task, 'duration_secs': 0.472055} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.974681] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Reconfigured VM instance instance-00000014 to attach disk [datastore1] 83d32dd6-2629-4451-a746-bf5270083e2a/83d32dd6-2629-4451-a746-bf5270083e2a.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 632.975599] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-56ef4246-1ecb-4453-8bf7-ba614d41b61d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.985742] env[68217]: DEBUG oslo_vmware.api [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960731, 'name': PowerOffVM_Task, 'duration_secs': 0.29614} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 632.986944] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 632.987149] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 632.987458] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 632.987458] env[68217]: value = "task-2960733" [ 632.987458] env[68217]: _type = "Task" [ 632.987458] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 632.987730] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-683af064-7bd9-4de0-b18f-9ed9d52e2fe8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.001572] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2960733, 'name': Rename_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.007122] env[68217]: DEBUG oslo_vmware.api [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': task-2960732, 'name': PowerOffVM_Task, 'duration_secs': 0.245501} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.007372] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 633.008164] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 633.008164] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9a980ed2-e210-46b2-a34e-bfc368c7f44c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.072750] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 633.073133] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 633.073133] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Deleting the datastore file [datastore1] aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 633.073515] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c46a1a44-bd24-4a02-bc24-50e46cafedb5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.079447] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 633.079675] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 633.079882] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Deleting the datastore file [datastore1] 678acc61-1c94-4152-b4e8-7569ab169ab9 {{(pid=68217) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 633.080187] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-46bef696-d948-44e6-a91d-cfc2f68b2b17 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.083958] env[68217]: DEBUG oslo_vmware.api [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 633.083958] env[68217]: value = "task-2960736" [ 633.083958] env[68217]: _type = "Task" [ 633.083958] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.090031] env[68217]: DEBUG oslo_vmware.api [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Waiting for the task: (returnval){ [ 633.090031] env[68217]: value = "task-2960737" [ 633.090031] env[68217]: _type = "Task" [ 633.090031] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.096202] env[68217]: DEBUG oslo_vmware.api [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960736, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.100384] env[68217]: DEBUG oslo_vmware.api [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': task-2960737, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.137515] env[68217]: DEBUG nova.compute.utils [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 633.141397] env[68217]: DEBUG nova.compute.manager [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 633.141607] env[68217]: DEBUG nova.network.neutron [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 633.200296] env[68217]: DEBUG nova.policy [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96d4c4affb734e3c9e36c8d028f1b42f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'abe88ad43d2c4fd681e7d2aa42c7d362', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 633.205912] env[68217]: DEBUG nova.compute.manager [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 633.206190] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 633.207196] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e1103b4-3a56-422b-82d1-44007a30f869 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.215257] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 633.217019] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ceb0abab-3d0d-44d7-a7cd-3b7eecbbedec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.222269] env[68217]: DEBUG oslo_vmware.api [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 633.222269] env[68217]: value = "task-2960738" [ 633.222269] env[68217]: _type = "Task" [ 633.222269] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.232289] env[68217]: DEBUG oslo_vmware.api [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960738, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.506659] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2960733, 'name': Rename_Task, 'duration_secs': 0.185969} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.507342] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 633.507614] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8a585954-d6b6-423f-93c6-882261d41c77 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.518161] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 633.518161] env[68217]: value = "task-2960739" [ 633.518161] env[68217]: _type = "Task" [ 633.518161] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.526985] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2960739, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 633.597504] env[68217]: DEBUG oslo_vmware.api [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960736, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18425} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.597784] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 633.597960] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 633.598232] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 633.598421] env[68217]: INFO nova.compute.manager [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Took 1.20 seconds to destroy the instance on the hypervisor. [ 633.598717] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 633.599332] env[68217]: DEBUG nova.compute.manager [-] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 633.599432] env[68217]: DEBUG nova.network.neutron [-] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 633.605702] env[68217]: DEBUG oslo_vmware.api [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Task: {'id': task-2960737, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.185233} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.606290] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 633.606468] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 633.606639] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 633.606807] env[68217]: INFO nova.compute.manager [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Took 1.15 seconds to destroy the instance on the hypervisor. [ 633.607095] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 633.607248] env[68217]: DEBUG nova.compute.manager [-] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 633.607343] env[68217]: DEBUG nova.network.neutron [-] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 633.644153] env[68217]: DEBUG nova.compute.manager [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 633.736490] env[68217]: DEBUG oslo_vmware.api [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960738, 'name': PowerOffVM_Task, 'duration_secs': 0.323966} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 633.736918] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 633.737110] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 633.737488] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-21f2cdaa-60c8-4cbf-afac-6ee60b397d75 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.816350] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 633.816582] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 633.816768] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Deleting the datastore file [datastore2] 14c8e8e6-5d7f-45b4-8a84-d5951c38573f {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 633.817060] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-27189232-5342-4093-8a55-9c9e43062a34 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.826467] env[68217]: DEBUG oslo_vmware.api [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 633.826467] env[68217]: value = "task-2960742" [ 633.826467] env[68217]: _type = "Task" [ 633.826467] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 633.834511] env[68217]: DEBUG oslo_vmware.api [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960742, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.029442] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2960739, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 634.088502] env[68217]: DEBUG nova.network.neutron [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Successfully created port: 683c092b-4729-4946-9f3a-b14200be8d7c {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 634.335971] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99640741-07cf-40ed-b003-7a02b6fd14b6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.342678] env[68217]: DEBUG oslo_vmware.api [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960742, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.341129} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.343346] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 634.343542] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 634.343717] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 634.343886] env[68217]: INFO nova.compute.manager [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Took 1.14 seconds to destroy the instance on the hypervisor. [ 634.344154] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 634.344346] env[68217]: DEBUG nova.compute.manager [-] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 634.344505] env[68217]: DEBUG nova.network.neutron [-] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 634.349016] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d865d8-8aa8-47b0-9bb1-f1475489707f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.387267] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa631aa3-e0e7-4da9-a6be-91e561cbaee3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.402363] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f96aba59-59d0-4d0a-a1f5-fbf02d38f713 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.415245] env[68217]: DEBUG nova.compute.provider_tree [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 634.420666] env[68217]: DEBUG nova.compute.manager [req-58f0e01d-dd63-40d1-83d7-a1eca9a47cc9 req-7ce1d1f3-8b51-472b-be35-f4db789debc9 service nova] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Received event network-vif-deleted-b9eba7e7-2ecb-4c93-bf49-1c2195e23121 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 634.420911] env[68217]: INFO nova.compute.manager [req-58f0e01d-dd63-40d1-83d7-a1eca9a47cc9 req-7ce1d1f3-8b51-472b-be35-f4db789debc9 service nova] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Neutron deleted interface b9eba7e7-2ecb-4c93-bf49-1c2195e23121; detaching it from the instance and deleting it from the info cache [ 634.420950] env[68217]: DEBUG nova.network.neutron [req-58f0e01d-dd63-40d1-83d7-a1eca9a47cc9 req-7ce1d1f3-8b51-472b-be35-f4db789debc9 service nova] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.422329] env[68217]: DEBUG nova.network.neutron [-] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.530977] env[68217]: DEBUG oslo_vmware.api [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2960739, 'name': PowerOnVM_Task, 'duration_secs': 0.625186} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 634.531483] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 634.531692] env[68217]: INFO nova.compute.manager [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Took 10.17 seconds to spawn the instance on the hypervisor. [ 634.532201] env[68217]: DEBUG nova.compute.manager [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 634.533194] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc9bec5-0117-4d09-9fd1-635c066baec8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.659392] env[68217]: DEBUG nova.compute.manager [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 634.695853] env[68217]: DEBUG nova.virt.hardware [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 634.695853] env[68217]: DEBUG nova.virt.hardware [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 634.695853] env[68217]: DEBUG nova.virt.hardware [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 634.696134] env[68217]: DEBUG nova.virt.hardware [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 634.696134] env[68217]: DEBUG nova.virt.hardware [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 634.696134] env[68217]: DEBUG nova.virt.hardware [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 634.696134] env[68217]: DEBUG nova.virt.hardware [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 634.696134] env[68217]: DEBUG nova.virt.hardware [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 634.696297] env[68217]: DEBUG nova.virt.hardware [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 634.696297] env[68217]: DEBUG nova.virt.hardware [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 634.696366] env[68217]: DEBUG nova.virt.hardware [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 634.697569] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77236bb7-3b3d-4d19-a132-b3b6749d210f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.707089] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f81e466-aa99-420f-9bb6-42b2d98d8b59 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.827799] env[68217]: DEBUG nova.compute.manager [req-dd9d588c-3582-4226-ab5c-5337fd228993 
req-bc932655-4814-44c1-849f-75e392037b63 service nova] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Received event network-vif-deleted-800cf755-2034-482c-a604-63fbfe457f26 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 634.827990] env[68217]: INFO nova.compute.manager [req-dd9d588c-3582-4226-ab5c-5337fd228993 req-bc932655-4814-44c1-849f-75e392037b63 service nova] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Neutron deleted interface 800cf755-2034-482c-a604-63fbfe457f26; detaching it from the instance and deleting it from the info cache [ 634.828171] env[68217]: DEBUG nova.network.neutron [req-dd9d588c-3582-4226-ab5c-5337fd228993 req-bc932655-4814-44c1-849f-75e392037b63 service nova] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.891538] env[68217]: DEBUG nova.network.neutron [-] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.925693] env[68217]: DEBUG nova.scheduler.client.report [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 634.931903] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5071010e-2637-4f5b-94d1-ddb3b4050e86 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.936974] env[68217]: INFO nova.compute.manager [-] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Took 1.33 seconds to deallocate network for instance. [ 634.955419] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7e835f2-b2da-467a-9426-61b074443802 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.991625] env[68217]: DEBUG nova.compute.manager [req-58f0e01d-dd63-40d1-83d7-a1eca9a47cc9 req-7ce1d1f3-8b51-472b-be35-f4db789debc9 service nova] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Detach interface failed, port_id=b9eba7e7-2ecb-4c93-bf49-1c2195e23121, reason: Instance 678acc61-1c94-4152-b4e8-7569ab169ab9 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 635.053149] env[68217]: INFO nova.compute.manager [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Took 42.52 seconds to build instance. 
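[editor's note] The repeated "_poll_task", "progress is N%" and "completed successfully" entries above (PowerOffVM_Task, DeleteDatastoreFile_Task, PowerOnVM_Task) all come from the driver waiting on vCenter tasks. The sketch below is only an illustration of that wait pattern, not the oslo.vmware implementation; fetch_task_info and VCenterTaskError are hypothetical stand-ins.

    import time

    class VCenterTaskError(RuntimeError):
        """Raised when a polled task ends in the 'error' state (illustrative)."""

    def wait_for_task(fetch_task_info, task_id, interval=0.5, timeout=300):
        """Poll a vCenter-style task until it succeeds, fails, or times out.

        fetch_task_info(task_id) is assumed to return a dict such as
        {'state': 'running', 'progress': 42} or {'state': 'success'} --
        a stand-in for the TaskInfo a real API session would retrieve.
        """
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info(task_id)
            state = info.get('state')
            if state == 'success':
                return info                      # mirrors "completed successfully"
            if state == 'error':
                raise VCenterTaskError(info.get('error', 'task failed'))
            # mirrors the "... progress is N%" debug lines while still running
            print(f"Task {task_id} progress is {info.get('progress', 0)}%")
            time.sleep(interval)
        raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")

The log's duration_secs values (e.g. 0.341129 for the DeleteDatastoreFile_Task above) are simply how long this kind of loop ran before the task reported success.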
[ 635.145023] env[68217]: DEBUG nova.network.neutron [-] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.334041] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a9843b6c-d7b5-45ad-ba06-0db2268d1505 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.343635] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60681b04-304d-4d2d-890b-6aae7f4c3005 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 635.375903] env[68217]: DEBUG nova.compute.manager [req-dd9d588c-3582-4226-ab5c-5337fd228993 req-bc932655-4814-44c1-849f-75e392037b63 service nova] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Detach interface failed, port_id=800cf755-2034-482c-a604-63fbfe457f26, reason: Instance aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 635.396836] env[68217]: INFO nova.compute.manager [-] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Took 1.80 seconds to deallocate network for instance. [ 635.438863] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.813s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.439713] env[68217]: DEBUG nova.compute.manager [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 635.442448] env[68217]: DEBUG oslo_concurrency.lockutils [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 36.259s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 635.442585] env[68217]: DEBUG nova.objects.instance [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Lazy-loading 'resources' on Instance uuid b0b21c65-ef3d-4492-a6b2-d2321a3dacde {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 635.446264] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.558138] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6ae7223f-f4c9-45cb-8315-65a1bc0ae5da tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "83d32dd6-2629-4451-a746-bf5270083e2a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.744s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 635.647944] env[68217]: INFO nova.compute.manager [-] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Took 1.30 seconds to deallocate network for instance. [ 635.907125] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 635.951587] env[68217]: DEBUG nova.compute.utils [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 635.954855] env[68217]: DEBUG nova.compute.manager [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 635.954855] env[68217]: DEBUG nova.network.neutron [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 636.043552] env[68217]: DEBUG nova.policy [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e63a0e21c83c4c7e9df31c27f7626462', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'dc0671abf27a426d9cee1db45efe4757', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 636.061157] env[68217]: DEBUG nova.network.neutron [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Successfully updated port: 683c092b-4729-4946-9f3a-b14200be8d7c {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 636.067383] env[68217]: DEBUG nova.compute.manager [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 636.160492] env[68217]: DEBUG oslo_concurrency.lockutils [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.420215] env[68217]: DEBUG nova.network.neutron [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Successfully created port: 8fbb4d07-34f6-4f5c-8057-fd5de704aba2 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 636.432049] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Acquiring lock "71243775-e8df-4cc5-85c9-d64a244b4426" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.433030] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Lock "71243775-e8df-4cc5-85c9-d64a244b4426" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 636.460991] env[68217]: DEBUG nova.compute.manager [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 636.569453] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquiring lock "refresh_cache-dfeeed37-8c84-4ecc-87ea-f4239f512fb1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.569721] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquired lock "refresh_cache-dfeeed37-8c84-4ecc-87ea-f4239f512fb1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 636.569970] env[68217]: DEBUG nova.network.neutron [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 636.592840] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.644125] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2934285-bcf2-4e86-809c-6230640528b1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.653766] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29275f40-1b94-4eb3-8daf-b58d91b111dc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.689285] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42fa2ca3-f7ca-4823-be9d-d8c0b48951a4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.693916] env[68217]: DEBUG nova.compute.manager [req-0b9615ed-1a45-4d99-8368-2fa7c8710410 req-b951e6ef-f112-4380-aeba-7ead3bcdbaf5 service nova] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Received event network-vif-deleted-67797938-23e4-4820-a467-727dfd4fca29 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 636.694125] env[68217]: DEBUG nova.compute.manager [req-0b9615ed-1a45-4d99-8368-2fa7c8710410 req-b951e6ef-f112-4380-aeba-7ead3bcdbaf5 service nova] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Received event network-changed-59d5e487-25d1-47fb-8b16-ebba73a03a4a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 636.694289] env[68217]: DEBUG nova.compute.manager [req-0b9615ed-1a45-4d99-8368-2fa7c8710410 req-b951e6ef-f112-4380-aeba-7ead3bcdbaf5 service nova] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Refreshing instance network info cache due to event network-changed-59d5e487-25d1-47fb-8b16-ebba73a03a4a. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 636.694502] env[68217]: DEBUG oslo_concurrency.lockutils [req-0b9615ed-1a45-4d99-8368-2fa7c8710410 req-b951e6ef-f112-4380-aeba-7ead3bcdbaf5 service nova] Acquiring lock "refresh_cache-83d32dd6-2629-4451-a746-bf5270083e2a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.694630] env[68217]: DEBUG oslo_concurrency.lockutils [req-0b9615ed-1a45-4d99-8368-2fa7c8710410 req-b951e6ef-f112-4380-aeba-7ead3bcdbaf5 service nova] Acquired lock "refresh_cache-83d32dd6-2629-4451-a746-bf5270083e2a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 636.694781] env[68217]: DEBUG nova.network.neutron [req-0b9615ed-1a45-4d99-8368-2fa7c8710410 req-b951e6ef-f112-4380-aeba-7ead3bcdbaf5 service nova] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Refreshing network info cache for port 59d5e487-25d1-47fb-8b16-ebba73a03a4a {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 636.703312] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b723ad99-6a6f-4c82-a76b-7682674d08af {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.719948] env[68217]: DEBUG nova.compute.provider_tree [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.905622] env[68217]: DEBUG nova.compute.manager [req-cd9b07ef-ce79-48b1-8f21-42f0ed4a4bf5 req-a4e89c1c-1769-4061-89f9-9241ff77595d service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Received event network-vif-plugged-683c092b-4729-4946-9f3a-b14200be8d7c {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 636.905844] env[68217]: DEBUG oslo_concurrency.lockutils [req-cd9b07ef-ce79-48b1-8f21-42f0ed4a4bf5 req-a4e89c1c-1769-4061-89f9-9241ff77595d service nova] Acquiring lock "dfeeed37-8c84-4ecc-87ea-f4239f512fb1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 636.906064] env[68217]: DEBUG oslo_concurrency.lockutils [req-cd9b07ef-ce79-48b1-8f21-42f0ed4a4bf5 req-a4e89c1c-1769-4061-89f9-9241ff77595d service nova] Lock "dfeeed37-8c84-4ecc-87ea-f4239f512fb1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 636.906235] env[68217]: DEBUG oslo_concurrency.lockutils [req-cd9b07ef-ce79-48b1-8f21-42f0ed4a4bf5 req-a4e89c1c-1769-4061-89f9-9241ff77595d service nova] Lock "dfeeed37-8c84-4ecc-87ea-f4239f512fb1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 636.906426] env[68217]: DEBUG nova.compute.manager [req-cd9b07ef-ce79-48b1-8f21-42f0ed4a4bf5 req-a4e89c1c-1769-4061-89f9-9241ff77595d service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] No waiting events 
found dispatching network-vif-plugged-683c092b-4729-4946-9f3a-b14200be8d7c {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 636.906596] env[68217]: WARNING nova.compute.manager [req-cd9b07ef-ce79-48b1-8f21-42f0ed4a4bf5 req-a4e89c1c-1769-4061-89f9-9241ff77595d service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Received unexpected event network-vif-plugged-683c092b-4729-4946-9f3a-b14200be8d7c for instance with vm_state building and task_state spawning. [ 636.906770] env[68217]: DEBUG nova.compute.manager [req-cd9b07ef-ce79-48b1-8f21-42f0ed4a4bf5 req-a4e89c1c-1769-4061-89f9-9241ff77595d service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Received event network-changed-683c092b-4729-4946-9f3a-b14200be8d7c {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 636.906935] env[68217]: DEBUG nova.compute.manager [req-cd9b07ef-ce79-48b1-8f21-42f0ed4a4bf5 req-a4e89c1c-1769-4061-89f9-9241ff77595d service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Refreshing instance network info cache due to event network-changed-683c092b-4729-4946-9f3a-b14200be8d7c. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 636.907171] env[68217]: DEBUG oslo_concurrency.lockutils [req-cd9b07ef-ce79-48b1-8f21-42f0ed4a4bf5 req-a4e89c1c-1769-4061-89f9-9241ff77595d service nova] Acquiring lock "refresh_cache-dfeeed37-8c84-4ecc-87ea-f4239f512fb1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.110700] env[68217]: DEBUG nova.network.neutron [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 637.223176] env[68217]: DEBUG nova.scheduler.client.report [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 637.355284] env[68217]: DEBUG nova.network.neutron [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Updating instance_info_cache with network_info: [{"id": "683c092b-4729-4946-9f3a-b14200be8d7c", "address": "fa:16:3e:52:04:da", "network": {"id": "1df67255-b55c-4745-9486-c3a61fbee22c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1634624222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abe88ad43d2c4fd681e7d2aa42c7d362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap683c092b-47", "ovs_interfaceid": "683c092b-4729-4946-9f3a-b14200be8d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.471293] env[68217]: DEBUG nova.compute.manager [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 637.504733] env[68217]: DEBUG nova.virt.hardware [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 637.505035] env[68217]: DEBUG nova.virt.hardware [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 637.505431] env[68217]: DEBUG nova.virt.hardware [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 637.505624] env[68217]: DEBUG nova.virt.hardware [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 637.505769] env[68217]: DEBUG nova.virt.hardware [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 637.505915] env[68217]: DEBUG nova.virt.hardware [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 637.506325] env[68217]: DEBUG nova.virt.hardware [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 637.506497] env[68217]: DEBUG nova.virt.hardware [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 637.506672] env[68217]: DEBUG nova.virt.hardware [None 
req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 637.506838] env[68217]: DEBUG nova.virt.hardware [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 637.507045] env[68217]: DEBUG nova.virt.hardware [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 637.507870] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae594c6a-07bd-447e-a28f-ac0dc41ceb72 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.516337] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f5c14c0-84a6-4434-825c-b5e974aa54a7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.549065] env[68217]: DEBUG nova.network.neutron [req-0b9615ed-1a45-4d99-8368-2fa7c8710410 req-b951e6ef-f112-4380-aeba-7ead3bcdbaf5 service nova] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Updated VIF entry in instance network info cache for port 59d5e487-25d1-47fb-8b16-ebba73a03a4a. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 637.549464] env[68217]: DEBUG nova.network.neutron [req-0b9615ed-1a45-4d99-8368-2fa7c8710410 req-b951e6ef-f112-4380-aeba-7ead3bcdbaf5 service nova] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Updating instance_info_cache with network_info: [{"id": "59d5e487-25d1-47fb-8b16-ebba73a03a4a", "address": "fa:16:3e:54:1e:2f", "network": {"id": "1bc7f0b6-9537-490a-8acf-d4b9bee78802", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1016751512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c9130982204910a139a45ddad542c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "42f08482-a1da-405d-9918-d733d9f5173c", "external-id": "nsx-vlan-transportzone-381", "segmentation_id": 381, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap59d5e487-25", "ovs_interfaceid": "59d5e487-25d1-47fb-8b16-ebba73a03a4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.730166] env[68217]: DEBUG oslo_concurrency.lockutils [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.288s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 637.732836] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 27.043s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 637.733311] env[68217]: DEBUG nova.objects.instance [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68217) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 637.753657] env[68217]: INFO nova.scheduler.client.report [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Deleted allocations for instance b0b21c65-ef3d-4492-a6b2-d2321a3dacde [ 637.858692] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Releasing lock "refresh_cache-dfeeed37-8c84-4ecc-87ea-f4239f512fb1" 
{{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 637.859084] env[68217]: DEBUG nova.compute.manager [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Instance network_info: |[{"id": "683c092b-4729-4946-9f3a-b14200be8d7c", "address": "fa:16:3e:52:04:da", "network": {"id": "1df67255-b55c-4745-9486-c3a61fbee22c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1634624222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abe88ad43d2c4fd681e7d2aa42c7d362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap683c092b-47", "ovs_interfaceid": "683c092b-4729-4946-9f3a-b14200be8d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 637.859538] env[68217]: DEBUG oslo_concurrency.lockutils [req-cd9b07ef-ce79-48b1-8f21-42f0ed4a4bf5 req-a4e89c1c-1769-4061-89f9-9241ff77595d service nova] Acquired lock "refresh_cache-dfeeed37-8c84-4ecc-87ea-f4239f512fb1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 637.859639] env[68217]: DEBUG nova.network.neutron [req-cd9b07ef-ce79-48b1-8f21-42f0ed4a4bf5 req-a4e89c1c-1769-4061-89f9-9241ff77595d service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Refreshing network info cache for port 683c092b-4729-4946-9f3a-b14200be8d7c {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 637.860891] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:04:da', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '863474bc-a24a-4823-828c-580a187829e3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '683c092b-4729-4946-9f3a-b14200be8d7c', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 637.873849] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Creating folder: Project (abe88ad43d2c4fd681e7d2aa42c7d362). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 637.877296] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5642006-4552-4277-ae72-722cee0ce595 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.892761] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Created folder: Project (abe88ad43d2c4fd681e7d2aa42c7d362) in parent group-v594094. [ 637.893120] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Creating folder: Instances. Parent ref: group-v594150. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 637.893429] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c76ef71c-e044-4dc8-948d-055f3d71aa82 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.910617] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Created folder: Instances in parent group-v594150. [ 637.910617] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 637.910755] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 637.910956] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-abe5cfd4-4d56-4498-a489-701882f07b01 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.938203] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 637.938203] env[68217]: value = "task-2960747" [ 637.938203] env[68217]: _type = "Task" [ 637.938203] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.947948] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960747, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.959521] env[68217]: DEBUG nova.network.neutron [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Successfully updated port: 8fbb4d07-34f6-4f5c-8057-fd5de704aba2 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 638.052629] env[68217]: DEBUG oslo_concurrency.lockutils [req-0b9615ed-1a45-4d99-8368-2fa7c8710410 req-b951e6ef-f112-4380-aeba-7ead3bcdbaf5 service nova] Releasing lock "refresh_cache-83d32dd6-2629-4451-a746-bf5270083e2a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.263551] env[68217]: DEBUG oslo_concurrency.lockutils [None req-992b4c2b-dffd-4476-be98-0e4eb21258e3 tempest-ServerExternalEventsTest-1120657847 tempest-ServerExternalEventsTest-1120657847-project-member] Lock "b0b21c65-ef3d-4492-a6b2-d2321a3dacde" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.944s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 638.451490] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960747, 'name': CreateVM_Task, 'duration_secs': 0.352869} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.451684] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 638.452635] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.452802] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.453469] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 638.453469] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2e061e0e-4f8e-4cf1-89f6-f4dc26ed11c9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.458166] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 638.458166] 
env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5202f776-4b47-50d7-ab96-7445ad7996c1" [ 638.458166] env[68217]: _type = "Task" [ 638.458166] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.463860] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Acquiring lock "refresh_cache-db4cf157-9511-423c-aa41-433af8d92b48" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.464072] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Acquired lock "refresh_cache-db4cf157-9511-423c-aa41-433af8d92b48" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.464205] env[68217]: DEBUG nova.network.neutron [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 638.468916] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5202f776-4b47-50d7-ab96-7445ad7996c1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.613277] env[68217]: DEBUG nova.network.neutron [req-cd9b07ef-ce79-48b1-8f21-42f0ed4a4bf5 req-a4e89c1c-1769-4061-89f9-9241ff77595d service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Updated VIF entry in instance network info cache for port 683c092b-4729-4946-9f3a-b14200be8d7c. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 638.613635] env[68217]: DEBUG nova.network.neutron [req-cd9b07ef-ce79-48b1-8f21-42f0ed4a4bf5 req-a4e89c1c-1769-4061-89f9-9241ff77595d service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Updating instance_info_cache with network_info: [{"id": "683c092b-4729-4946-9f3a-b14200be8d7c", "address": "fa:16:3e:52:04:da", "network": {"id": "1df67255-b55c-4745-9486-c3a61fbee22c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1634624222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abe88ad43d2c4fd681e7d2aa42c7d362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap683c092b-47", "ovs_interfaceid": "683c092b-4729-4946-9f3a-b14200be8d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.696494] env[68217]: DEBUG nova.compute.manager [req-fdedd9a9-1615-4daf-bbf1-082e125c7ff9 req-be094569-3d8f-4279-972f-a59173016808 service nova] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Received event network-vif-plugged-8fbb4d07-34f6-4f5c-8057-fd5de704aba2 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 638.696494] env[68217]: DEBUG oslo_concurrency.lockutils [req-fdedd9a9-1615-4daf-bbf1-082e125c7ff9 req-be094569-3d8f-4279-972f-a59173016808 service nova] Acquiring lock "db4cf157-9511-423c-aa41-433af8d92b48-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 638.696557] env[68217]: DEBUG oslo_concurrency.lockutils [req-fdedd9a9-1615-4daf-bbf1-082e125c7ff9 req-be094569-3d8f-4279-972f-a59173016808 service nova] Lock "db4cf157-9511-423c-aa41-433af8d92b48-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.696837] env[68217]: DEBUG oslo_concurrency.lockutils [req-fdedd9a9-1615-4daf-bbf1-082e125c7ff9 req-be094569-3d8f-4279-972f-a59173016808 service nova] Lock "db4cf157-9511-423c-aa41-433af8d92b48-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 638.696903] env[68217]: DEBUG nova.compute.manager [req-fdedd9a9-1615-4daf-bbf1-082e125c7ff9 req-be094569-3d8f-4279-972f-a59173016808 service nova] [instance: db4cf157-9511-423c-aa41-433af8d92b48] No waiting events found dispatching network-vif-plugged-8fbb4d07-34f6-4f5c-8057-fd5de704aba2 {{(pid=68217) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 638.697055] env[68217]: WARNING nova.compute.manager [req-fdedd9a9-1615-4daf-bbf1-082e125c7ff9 req-be094569-3d8f-4279-972f-a59173016808 service nova] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Received unexpected event network-vif-plugged-8fbb4d07-34f6-4f5c-8057-fd5de704aba2 for instance with vm_state building and task_state spawning. [ 638.697251] env[68217]: DEBUG nova.compute.manager [req-fdedd9a9-1615-4daf-bbf1-082e125c7ff9 req-be094569-3d8f-4279-972f-a59173016808 service nova] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Received event network-changed-8fbb4d07-34f6-4f5c-8057-fd5de704aba2 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 638.697414] env[68217]: DEBUG nova.compute.manager [req-fdedd9a9-1615-4daf-bbf1-082e125c7ff9 req-be094569-3d8f-4279-972f-a59173016808 service nova] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Refreshing instance network info cache due to event network-changed-8fbb4d07-34f6-4f5c-8057-fd5de704aba2. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 638.697577] env[68217]: DEBUG oslo_concurrency.lockutils [req-fdedd9a9-1615-4daf-bbf1-082e125c7ff9 req-be094569-3d8f-4279-972f-a59173016808 service nova] Acquiring lock "refresh_cache-db4cf157-9511-423c-aa41-433af8d92b48" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.746092] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5c7163aa-cc29-4779-89f1-85feb0a4afce tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.013s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 638.747204] env[68217]: DEBUG oslo_concurrency.lockutils [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.759s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 638.748750] env[68217]: INFO nova.compute.claims [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 638.974115] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5202f776-4b47-50d7-ab96-7445ad7996c1, 'name': SearchDatastore_Task, 'duration_secs': 0.013396} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.974620] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 638.974714] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 638.974851] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 638.974989] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 638.975216] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 638.975773] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4f8056d2-784b-4e94-bc28-43da8c1ed6c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.990588] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 638.990781] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 638.991608] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-80c9d123-e146-4cb5-9c01-b8f3d98c1b90 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.002698] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 639.002698] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52681c24-eb64-8a28-f025-cd760ef6151e" [ 639.002698] env[68217]: _type = "Task" [ 639.002698] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.015029] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52681c24-eb64-8a28-f025-cd760ef6151e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.036016] env[68217]: DEBUG nova.network.neutron [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 639.117058] env[68217]: DEBUG oslo_concurrency.lockutils [req-cd9b07ef-ce79-48b1-8f21-42f0ed4a4bf5 req-a4e89c1c-1769-4061-89f9-9241ff77595d service nova] Releasing lock "refresh_cache-dfeeed37-8c84-4ecc-87ea-f4239f512fb1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 639.185736] env[68217]: DEBUG nova.network.neutron [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Updating instance_info_cache with network_info: [{"id": "8fbb4d07-34f6-4f5c-8057-fd5de704aba2", "address": "fa:16:3e:ef:be:20", "network": {"id": "366acb23-46bf-4747-84d7-c9228b0e4d91", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-574329409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc0671abf27a426d9cee1db45efe4757", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fbb4d07-34", "ovs_interfaceid": "8fbb4d07-34f6-4f5c-8057-fd5de704aba2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.515249] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52681c24-eb64-8a28-f025-cd760ef6151e, 'name': SearchDatastore_Task, 'duration_secs': 0.038683} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 639.515705] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-00886d18-4a8e-4508-b2e3-f15ca2c84927 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.522294] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 639.522294] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5256bce8-35be-304b-f1ae-6942384b136b" [ 639.522294] env[68217]: _type = "Task" [ 639.522294] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.530870] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5256bce8-35be-304b-f1ae-6942384b136b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.688643] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Releasing lock "refresh_cache-db4cf157-9511-423c-aa41-433af8d92b48" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 639.688995] env[68217]: DEBUG nova.compute.manager [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Instance network_info: |[{"id": "8fbb4d07-34f6-4f5c-8057-fd5de704aba2", "address": "fa:16:3e:ef:be:20", "network": {"id": "366acb23-46bf-4747-84d7-c9228b0e4d91", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-574329409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc0671abf27a426d9cee1db45efe4757", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fbb4d07-34", "ovs_interfaceid": "8fbb4d07-34f6-4f5c-8057-fd5de704aba2", "qbh_params": null, "qbg_params": null, 
"active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 639.689370] env[68217]: DEBUG oslo_concurrency.lockutils [req-fdedd9a9-1615-4daf-bbf1-082e125c7ff9 req-be094569-3d8f-4279-972f-a59173016808 service nova] Acquired lock "refresh_cache-db4cf157-9511-423c-aa41-433af8d92b48" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 639.689548] env[68217]: DEBUG nova.network.neutron [req-fdedd9a9-1615-4daf-bbf1-082e125c7ff9 req-be094569-3d8f-4279-972f-a59173016808 service nova] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Refreshing network info cache for port 8fbb4d07-34f6-4f5c-8057-fd5de704aba2 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 639.690763] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ef:be:20', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e1c9bb98-73a9-48eb-856e-a541afe9b07b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8fbb4d07-34f6-4f5c-8057-fd5de704aba2', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 639.699951] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Creating folder: Project (dc0671abf27a426d9cee1db45efe4757). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 639.703035] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e0bb1664-7c2e-467b-845d-5d763d4d9aaf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.718951] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Created folder: Project (dc0671abf27a426d9cee1db45efe4757) in parent group-v594094. [ 639.719369] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Creating folder: Instances. Parent ref: group-v594153. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 639.719436] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-72b3d866-946d-4d27-a835-0db12c130d78 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.730677] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Created folder: Instances in parent group-v594153. 
[ 639.730923] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 639.731135] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 639.731345] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-303e11f8-f8c8-4ffe-a0f8-1e29c7921ab0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.754636] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 639.754636] env[68217]: value = "task-2960750" [ 639.754636] env[68217]: _type = "Task" [ 639.754636] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 639.768148] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960750, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 639.947125] env[68217]: DEBUG nova.network.neutron [req-fdedd9a9-1615-4daf-bbf1-082e125c7ff9 req-be094569-3d8f-4279-972f-a59173016808 service nova] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Updated VIF entry in instance network info cache for port 8fbb4d07-34f6-4f5c-8057-fd5de704aba2. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 639.947730] env[68217]: DEBUG nova.network.neutron [req-fdedd9a9-1615-4daf-bbf1-082e125c7ff9 req-be094569-3d8f-4279-972f-a59173016808 service nova] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Updating instance_info_cache with network_info: [{"id": "8fbb4d07-34f6-4f5c-8057-fd5de704aba2", "address": "fa:16:3e:ef:be:20", "network": {"id": "366acb23-46bf-4747-84d7-c9228b0e4d91", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-574329409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc0671abf27a426d9cee1db45efe4757", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fbb4d07-34", "ovs_interfaceid": "8fbb4d07-34f6-4f5c-8057-fd5de704aba2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.035477] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': 
session[524e8b0f-828c-0303-9685-d9311f1dba7a]5256bce8-35be-304b-f1ae-6942384b136b, 'name': SearchDatastore_Task, 'duration_secs': 0.010864} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.035756] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 640.036038] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] dfeeed37-8c84-4ecc-87ea-f4239f512fb1/dfeeed37-8c84-4ecc-87ea-f4239f512fb1.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 640.036714] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0460864c-4b2f-4838-92fa-e05377f6f94a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.051039] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 640.051039] env[68217]: value = "task-2960752" [ 640.051039] env[68217]: _type = "Task" [ 640.051039] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.060112] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2960752, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.267381] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960750, 'name': CreateVM_Task, 'duration_secs': 0.383083} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.267533] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 640.268451] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.268708] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 640.269083] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 640.272972] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7def3c0b-96dd-47cf-b644-ce759c848a58 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.279809] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Waiting for the task: (returnval){ [ 640.279809] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ebb40e-3e60-12e7-988d-5f1018b65241" [ 640.279809] env[68217]: _type = "Task" [ 640.279809] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.291626] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ebb40e-3e60-12e7-988d-5f1018b65241, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.328443] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b603b9de-7fad-4dfb-9ac3-1c6775299b8e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.337362] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b30eff61-bce4-4daa-bf14-220a9caf35bc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.373881] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4408edd3-2e31-49a7-a6d4-fb13fb2c1fe8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.384781] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd630f5a-392b-4cb8-8faf-6426c486e2f8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.403374] env[68217]: DEBUG nova.compute.provider_tree [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 640.450372] env[68217]: DEBUG oslo_concurrency.lockutils [req-fdedd9a9-1615-4daf-bbf1-082e125c7ff9 req-be094569-3d8f-4279-972f-a59173016808 service nova] Releasing lock "refresh_cache-db4cf157-9511-423c-aa41-433af8d92b48" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 640.565801] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2960752, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.791941] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ebb40e-3e60-12e7-988d-5f1018b65241, 'name': SearchDatastore_Task, 'duration_secs': 0.04704} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 640.791941] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 640.791941] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 640.791941] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.792119] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 640.792119] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 640.792324] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-175a5dd0-597e-41d5-b6a3-7a1a85b31982 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.802802] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 640.802892] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 640.803729] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d65570e7-2fb8-4c96-a705-7e8786c2c019 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.809795] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Waiting for the task: (returnval){ [ 640.809795] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5209284e-d990-469d-a462-84f5bf555c4d" [ 640.809795] env[68217]: _type = "Task" [ 640.809795] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 640.818508] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5209284e-d990-469d-a462-84f5bf555c4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 640.910990] env[68217]: DEBUG nova.scheduler.client.report [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 641.063355] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2960752, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.604743} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.064316] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] dfeeed37-8c84-4ecc-87ea-f4239f512fb1/dfeeed37-8c84-4ecc-87ea-f4239f512fb1.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 641.064316] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 641.064316] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-db728cb7-6315-45f9-8965-7c7f1cd37a18 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.076613] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 641.076613] env[68217]: value = "task-2960753" [ 641.076613] env[68217]: _type = "Task" [ 641.076613] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.086119] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2960753, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.324544] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5209284e-d990-469d-a462-84f5bf555c4d, 'name': SearchDatastore_Task, 'duration_secs': 0.014015} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.325807] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5238be25-b7ad-4568-a805-1b8c9209c3b5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.332878] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Waiting for the task: (returnval){ [ 641.332878] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5293cac8-5165-6b98-0622-2ec4ae2f4d9f" [ 641.332878] env[68217]: _type = "Task" [ 641.332878] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.342366] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5293cac8-5165-6b98-0622-2ec4ae2f4d9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.416660] env[68217]: DEBUG oslo_concurrency.lockutils [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.669s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 641.417212] env[68217]: DEBUG nova.compute.manager [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 641.421294] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.906s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 641.425021] env[68217]: INFO nova.compute.claims [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 641.587126] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2960753, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.190743} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.587410] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 641.588181] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd25a889-3130-42b9-bbf8-7f9b0d2f5358 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.610952] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Reconfiguring VM instance instance-00000015 to attach disk [datastore1] dfeeed37-8c84-4ecc-87ea-f4239f512fb1/dfeeed37-8c84-4ecc-87ea-f4239f512fb1.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 641.611561] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c71130e9-9f5c-4cc6-9e14-98db08dc8cce {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.632721] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 641.632721] env[68217]: value = "task-2960754" [ 641.632721] env[68217]: _type = "Task" [ 641.632721] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.640983] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2960754, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.845764] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5293cac8-5165-6b98-0622-2ec4ae2f4d9f, 'name': SearchDatastore_Task, 'duration_secs': 0.032757} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 641.846054] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 641.846373] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] db4cf157-9511-423c-aa41-433af8d92b48/db4cf157-9511-423c-aa41-433af8d92b48.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 641.846680] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-238f41ec-ebe9-4416-a902-97e31a4ca2a4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.856194] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Waiting for the task: (returnval){ [ 641.856194] env[68217]: value = "task-2960755" [ 641.856194] env[68217]: _type = "Task" [ 641.856194] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 641.867869] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': task-2960755, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 641.927542] env[68217]: DEBUG nova.compute.utils [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 641.931411] env[68217]: DEBUG nova.compute.manager [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Not allocating networking since 'none' was specified. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 642.144501] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2960754, 'name': ReconfigVM_Task, 'duration_secs': 0.342073} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.144819] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Reconfigured VM instance instance-00000015 to attach disk [datastore1] dfeeed37-8c84-4ecc-87ea-f4239f512fb1/dfeeed37-8c84-4ecc-87ea-f4239f512fb1.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 642.148200] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-92e57deb-c128-4831-914f-91a1ec740f9d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.154198] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 642.154198] env[68217]: value = "task-2960757" [ 642.154198] env[68217]: _type = "Task" [ 642.154198] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.169608] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2960757, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.367641] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': task-2960755, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.436964] env[68217]: DEBUG nova.compute.manager [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 642.676837] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2960757, 'name': Rename_Task, 'duration_secs': 0.165212} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.676837] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 642.676837] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3438be68-3acd-40c0-8fcf-0a1e9ef2cb0a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.690471] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 642.690471] env[68217]: value = "task-2960758" [ 642.690471] env[68217]: _type = "Task" [ 642.690471] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.708395] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2960758, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 642.870585] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': task-2960755, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.944193} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 642.873973] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] db4cf157-9511-423c-aa41-433af8d92b48/db4cf157-9511-423c-aa41-433af8d92b48.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 642.874229] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 642.874677] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e5b4d105-31c1-44b7-b9a5-f2b5fe9a90d5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 642.883673] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Waiting for the task: (returnval){ [ 642.883673] env[68217]: value = "task-2960759" [ 642.883673] env[68217]: _type = "Task" [ 642.883673] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 642.896113] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': task-2960759, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.039522] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c404892b-dba0-4904-b449-daea936cbadc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.048794] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91ebc5c0-325a-4240-8383-1bf84325ef27 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.084293] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2649d35f-57ab-4296-8184-cada10631fc0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.093530] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a7a7d2b-0012-4862-95d7-635099592ccb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.109154] env[68217]: DEBUG nova.compute.provider_tree [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 643.202308] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2960758, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.395259] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': task-2960759, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.10292} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.395540] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 643.396363] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d51de3b-2c54-4791-be10-d464011e3f4d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.420545] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Reconfiguring VM instance instance-00000016 to attach disk [datastore1] db4cf157-9511-423c-aa41-433af8d92b48/db4cf157-9511-423c-aa41-433af8d92b48.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 643.420860] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10e666f4-2f18-464d-ae2f-153e5c40dcc1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.442389] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Waiting for the task: (returnval){ [ 643.442389] env[68217]: value = "task-2960760" [ 643.442389] env[68217]: _type = "Task" [ 643.442389] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.452901] env[68217]: DEBUG nova.compute.manager [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 643.454969] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': task-2960760, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.488925] env[68217]: DEBUG nova.virt.hardware [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 643.489247] env[68217]: DEBUG nova.virt.hardware [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 643.489436] env[68217]: DEBUG nova.virt.hardware [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 643.489622] env[68217]: DEBUG nova.virt.hardware [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 643.489861] env[68217]: DEBUG nova.virt.hardware [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 643.490133] env[68217]: DEBUG nova.virt.hardware [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 643.490505] env[68217]: DEBUG nova.virt.hardware [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 643.490696] env[68217]: DEBUG nova.virt.hardware [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 643.490919] env[68217]: DEBUG nova.virt.hardware [None req-56750ab8-44dc-4b43-a61e-da9e07682569 
tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 643.491190] env[68217]: DEBUG nova.virt.hardware [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 643.491480] env[68217]: DEBUG nova.virt.hardware [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 643.492720] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e2f23c8-bc3b-4cdb-9603-ebac02cd09ce {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.503317] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2b4903-1dd2-4e94-bbf1-bac76f657c92 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.522221] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Instance VIF info [] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 643.528898] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Creating folder: Project (6dbba2d89fd646a59b1bf858b78a5167). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 643.529238] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e3122522-201f-40c3-876d-15373fdc57c4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.544023] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Created folder: Project (6dbba2d89fd646a59b1bf858b78a5167) in parent group-v594094. [ 643.544023] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Creating folder: Instances. Parent ref: group-v594157. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 643.544023] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-40c7e520-8bb0-4d9d-9a7e-cb31580f39b5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.555163] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Created folder: Instances in parent group-v594157. 
[ 643.555163] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 643.555163] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 643.555392] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-88913e26-0481-4ede-bce8-e7be7ae85572 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.578908] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 643.578908] env[68217]: value = "task-2960763" [ 643.578908] env[68217]: _type = "Task" [ 643.578908] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.588159] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960763, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 643.613202] env[68217]: DEBUG nova.scheduler.client.report [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 643.704025] env[68217]: DEBUG oslo_vmware.api [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2960758, 'name': PowerOnVM_Task, 'duration_secs': 0.525925} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.704025] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 643.704025] env[68217]: INFO nova.compute.manager [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Took 9.04 seconds to spawn the instance on the hypervisor. 
[ 643.704025] env[68217]: DEBUG nova.compute.manager [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 643.704025] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb102512-0c01-4884-a059-d2c216e34f55 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.957695] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': task-2960760, 'name': ReconfigVM_Task, 'duration_secs': 0.407005} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 643.958267] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Reconfigured VM instance instance-00000016 to attach disk [datastore1] db4cf157-9511-423c-aa41-433af8d92b48/db4cf157-9511-423c-aa41-433af8d92b48.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 643.961530] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-41f6a158-5d92-4d44-9fec-ea7f952db277 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.973537] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Waiting for the task: (returnval){ [ 643.973537] env[68217]: value = "task-2960765" [ 643.973537] env[68217]: _type = "Task" [ 643.973537] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 643.984927] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': task-2960765, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.090095] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960763, 'name': CreateVM_Task, 'duration_secs': 0.382454} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.090095] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 644.090226] env[68217]: DEBUG oslo_concurrency.lockutils [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.090356] env[68217]: DEBUG oslo_concurrency.lockutils [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.090906] env[68217]: DEBUG oslo_concurrency.lockutils [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 644.090977] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1c397fb4-a1e7-46f2-a290-e817dc2b54bf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.098626] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 644.098626] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522bc1bf-55ee-0602-512a-24a72f71796b" [ 644.098626] env[68217]: _type = "Task" [ 644.098626] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.108911] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522bc1bf-55ee-0602-512a-24a72f71796b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.118947] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.699s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 644.120339] env[68217]: DEBUG nova.compute.manager [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 644.125767] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.859s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 644.128601] env[68217]: INFO nova.compute.claims [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 644.220631] env[68217]: INFO nova.compute.manager [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Took 49.56 seconds to build instance. [ 644.484636] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': task-2960765, 'name': Rename_Task, 'duration_secs': 0.190192} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.484906] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 644.485654] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e202320e-c570-4b8b-9805-ea5da4c8f419 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.493608] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Waiting for the task: (returnval){ [ 644.493608] env[68217]: value = "task-2960766" [ 644.493608] env[68217]: _type = "Task" [ 644.493608] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.507454] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': task-2960766, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.611365] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522bc1bf-55ee-0602-512a-24a72f71796b, 'name': SearchDatastore_Task, 'duration_secs': 0.012033} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.611713] env[68217]: DEBUG oslo_concurrency.lockutils [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 644.611950] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 644.612205] env[68217]: DEBUG oslo_concurrency.lockutils [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 644.612350] env[68217]: DEBUG oslo_concurrency.lockutils [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 644.612527] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 644.612804] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0b45c290-46b8-4b3e-a566-4edde7a86dd4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.625173] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 644.625173] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 644.625696] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e578c489-ba8b-4e99-9d4d-75fa385659e1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.636543] env[68217]: DEBUG nova.compute.utils [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 644.643334] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 644.643334] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]528f4b1e-691b-a755-7e86-d675834cfe13" [ 644.643334] env[68217]: _type = "Task" [ 644.643334] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.643694] env[68217]: DEBUG nova.compute.manager [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 644.643925] env[68217]: DEBUG nova.network.neutron [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 644.656045] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528f4b1e-691b-a755-7e86-d675834cfe13, 'name': SearchDatastore_Task, 'duration_secs': 0.012709} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 644.656924] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-863fcfd0-a7dc-408b-8dc8-c660de2e7fa9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 644.663494] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 644.663494] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5226fe0d-6dbb-722d-928f-e95aebf75eb6" [ 644.663494] env[68217]: _type = "Task" [ 644.663494] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 644.675253] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5226fe0d-6dbb-722d-928f-e95aebf75eb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 644.708787] env[68217]: DEBUG nova.policy [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '91772cce5a7343d7bba596c00bc583f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0a00814fe3eb4f1fa647f7876b11e86f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 644.722228] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f902d055-1137-479f-80f2-7ea791489a48 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "dfeeed37-8c84-4ecc-87ea-f4239f512fb1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.386s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 644.991331] env[68217]: DEBUG nova.network.neutron [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Successfully created port: e7076bf7-0c58-414f-ad86-6ae8c8b8cc96 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 645.005384] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': task-2960766, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.071547] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Acquiring lock "d0d8ed27-003e-43e2-8a07-041420a2c758" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.071759] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Lock "d0d8ed27-003e-43e2-8a07-041420a2c758" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 645.149344] env[68217]: DEBUG nova.compute.manager [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 645.180214] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5226fe0d-6dbb-722d-928f-e95aebf75eb6, 'name': SearchDatastore_Task, 'duration_secs': 0.012327} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.180214] env[68217]: DEBUG oslo_concurrency.lockutils [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 645.180505] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 17bea068-7d7a-4a87-8b27-91a7efcd45c5/17bea068-7d7a-4a87-8b27-91a7efcd45c5.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 645.180773] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f7d0c36-dadc-4b81-91e9-7aa2a489fdee {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.194851] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 645.194851] env[68217]: value = "task-2960767" [ 645.194851] env[68217]: _type = "Task" [ 645.194851] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.212574] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960767, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.227035] env[68217]: DEBUG nova.compute.manager [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 645.512024] env[68217]: DEBUG oslo_vmware.api [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': task-2960766, 'name': PowerOnVM_Task, 'duration_secs': 0.578562} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.512024] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 645.512024] env[68217]: INFO nova.compute.manager [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Took 8.04 seconds to spawn the instance on the hypervisor. [ 645.512024] env[68217]: DEBUG nova.compute.manager [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 645.513574] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9373c83a-74aa-47ba-8751-b0afd47b4f48 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.708827] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960767, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.489675} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 645.708827] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 17bea068-7d7a-4a87-8b27-91a7efcd45c5/17bea068-7d7a-4a87-8b27-91a7efcd45c5.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 645.709162] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 645.711731] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-96313f1a-f748-469f-9fdd-6573519954d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.720068] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 645.720068] env[68217]: value = "task-2960768" [ 645.720068] env[68217]: _type = "Task" [ 645.720068] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 645.733847] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960768, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 645.748589] env[68217]: DEBUG oslo_concurrency.lockutils [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 645.775430] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c519c7ac-2225-4a7e-8f71-9022c58fa6f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.785122] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2bacc63-6dbe-47b3-9d27-61b5034c322b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.816249] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cff028b-ac94-4eed-9ac9-960513d431ef {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.824852] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6098e9b3-5ba3-43c5-a090-e083a3ead966 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 645.841660] env[68217]: DEBUG nova.compute.provider_tree [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 646.037922] env[68217]: INFO nova.compute.manager [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Took 48.71 seconds to build instance. [ 646.161540] env[68217]: DEBUG nova.compute.manager [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 646.186891] env[68217]: DEBUG nova.virt.hardware [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 646.187414] env[68217]: DEBUG nova.virt.hardware [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 646.187659] env[68217]: DEBUG nova.virt.hardware [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 646.188304] env[68217]: DEBUG nova.virt.hardware [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 646.188550] env[68217]: DEBUG nova.virt.hardware [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 646.188784] env[68217]: DEBUG nova.virt.hardware [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 646.189081] env[68217]: DEBUG nova.virt.hardware [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 646.189337] env[68217]: DEBUG nova.virt.hardware [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 646.189660] env[68217]: DEBUG nova.virt.hardware [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 646.189920] env[68217]: DEBUG nova.virt.hardware [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 646.190176] env[68217]: DEBUG nova.virt.hardware [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 646.191560] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78205832-19ef-4ab7-a124-51cb25464276 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.201284] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec5d6d0-34e9-4acc-946e-952b0e871ffb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.229915] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960768, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074738} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 646.230300] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 646.231314] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c9fefdc-7468-44ac-8146-3816698ae411 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.260107] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] 17bea068-7d7a-4a87-8b27-91a7efcd45c5/17bea068-7d7a-4a87-8b27-91a7efcd45c5.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 646.260107] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2a35183-eb05-4727-9001-0edd9becd096 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.283431] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 646.283431] env[68217]: value = "task-2960770" [ 646.283431] env[68217]: _type = "Task" [ 646.283431] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 646.292683] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960770, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.344533] env[68217]: DEBUG nova.scheduler.client.report [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 646.539852] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a8cf1a53-aee0-44f2-bea9-9b3cb7580eb8 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Lock "db4cf157-9511-423c-aa41-433af8d92b48" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.488s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 646.545977] env[68217]: DEBUG nova.compute.manager [req-982ecaf6-3aeb-4121-8c05-1657809d2107 req-899e196f-911c-47dd-b666-5385c8340976 service nova] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Received event network-vif-plugged-e7076bf7-0c58-414f-ad86-6ae8c8b8cc96 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 646.545977] env[68217]: DEBUG oslo_concurrency.lockutils [req-982ecaf6-3aeb-4121-8c05-1657809d2107 req-899e196f-911c-47dd-b666-5385c8340976 service nova] Acquiring lock "fcddfd72-a130-4efc-82cb-1fb22d33d684-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.545977] env[68217]: DEBUG oslo_concurrency.lockutils [req-982ecaf6-3aeb-4121-8c05-1657809d2107 req-899e196f-911c-47dd-b666-5385c8340976 service nova] Lock "fcddfd72-a130-4efc-82cb-1fb22d33d684-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.545977] env[68217]: DEBUG oslo_concurrency.lockutils [req-982ecaf6-3aeb-4121-8c05-1657809d2107 req-899e196f-911c-47dd-b666-5385c8340976 service nova] Lock "fcddfd72-a130-4efc-82cb-1fb22d33d684-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 646.545977] env[68217]: DEBUG nova.compute.manager [req-982ecaf6-3aeb-4121-8c05-1657809d2107 req-899e196f-911c-47dd-b666-5385c8340976 service nova] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] No waiting events found dispatching network-vif-plugged-e7076bf7-0c58-414f-ad86-6ae8c8b8cc96 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 646.550172] env[68217]: WARNING nova.compute.manager [req-982ecaf6-3aeb-4121-8c05-1657809d2107 req-899e196f-911c-47dd-b666-5385c8340976 service nova] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Received unexpected event 
network-vif-plugged-e7076bf7-0c58-414f-ad86-6ae8c8b8cc96 for instance with vm_state building and task_state spawning. [ 646.550172] env[68217]: DEBUG nova.network.neutron [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Successfully updated port: e7076bf7-0c58-414f-ad86-6ae8c8b8cc96 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 646.633123] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquiring lock "7ec30097-1151-4b0d-8226-e4d34ea7b3c9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 646.633622] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "7ec30097-1151-4b0d-8226-e4d34ea7b3c9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.794370] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960770, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 646.849947] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.725s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 646.850497] env[68217]: DEBUG nova.compute.manager [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 646.852998] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.939s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 646.853225] env[68217]: DEBUG nova.objects.instance [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Lazy-loading 'resources' on Instance uuid bbd282ea-58aa-47b8-aa82-283a55ac1b29 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 647.043347] env[68217]: DEBUG nova.compute.manager [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 647.054080] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquiring lock "refresh_cache-fcddfd72-a130-4efc-82cb-1fb22d33d684" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.054273] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquired lock "refresh_cache-fcddfd72-a130-4efc-82cb-1fb22d33d684" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.054415] env[68217]: DEBUG nova.network.neutron [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 647.298553] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960770, 'name': ReconfigVM_Task, 'duration_secs': 0.842399} completed successfully. 
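The repeated "Task: {'id': task-..., 'name': ReconfigVM_Task} progress is N%" lines come from oslo.vmware's task poller, which re-reads a vSphere task until it reaches success or error. Below is a simplified stand-in for that loop with a stubbed task object; it is not the real oslo.vmware session API, just the shape of the polling.

```python
import time
from dataclasses import dataclass, field


@dataclass
class FakeTaskInfo:
    """Stand-in for a vSphere TaskInfo object (state/progress/error)."""
    state: str = "running"
    progress: int = 0
    error: str = ""
    _ticks: int = field(default=0, repr=False)

    def refresh(self):
        # Pretend the server-side task advances a little on every poll.
        self._ticks += 1
        self.progress = min(100, self._ticks * 33)
        if self.progress >= 100:
            self.state = "success"


def wait_for_task(task, poll_interval=0.5):
    """Poll until the task succeeds or fails, mirroring the
    wait_for_task/_poll_task pair seen in the log (no backoff, no opIDs)."""
    while True:
        task.refresh()
        if task.state == "success":
            return task
        if task.state == "error":
            raise RuntimeError(task.error or "task failed")
        print(f"progress is {task.progress}%")  # mirrors the DEBUG lines
        time.sleep(poll_interval)


if __name__ == "__main__":
    wait_for_task(FakeTaskInfo(), poll_interval=0.1)
```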
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.298870] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Reconfigured VM instance instance-00000017 to attach disk [datastore2] 17bea068-7d7a-4a87-8b27-91a7efcd45c5/17bea068-7d7a-4a87-8b27-91a7efcd45c5.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 647.299513] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0154fcaa-2462-4a66-9683-cb08f9fb2e76 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.306804] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 647.306804] env[68217]: value = "task-2960771" [ 647.306804] env[68217]: _type = "Task" [ 647.306804] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.317617] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960771, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.359097] env[68217]: DEBUG nova.compute.utils [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 647.366731] env[68217]: DEBUG nova.compute.manager [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 647.366731] env[68217]: DEBUG nova.network.neutron [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 647.414103] env[68217]: DEBUG nova.policy [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4e30c52618c445e0b1c602b71b7a4f01', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3f5fa72e1aad4470b15e81061d8b4b7d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 647.568290] env[68217]: DEBUG oslo_concurrency.lockutils [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 647.595633] env[68217]: DEBUG nova.network.neutron [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 647.719157] env[68217]: DEBUG nova.compute.manager [req-08d50fb9-5e81-49c4-bd66-7b71ae4e57a4 req-8e7255ea-bba7-4f53-8227-bddf056b369a service nova] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Received event network-changed-8fbb4d07-34f6-4f5c-8057-fd5de704aba2 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 647.719157] env[68217]: DEBUG nova.compute.manager [req-08d50fb9-5e81-49c4-bd66-7b71ae4e57a4 req-8e7255ea-bba7-4f53-8227-bddf056b369a service nova] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Refreshing instance network info cache due to event network-changed-8fbb4d07-34f6-4f5c-8057-fd5de704aba2. 
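The "Policy check for network:attach_external_network failed" entry shows a caller whose credentials carry only the 'member' and 'reader' roles being denied. Real Nova evaluates an oslo.policy rule here; the snippet below is only a simplified role-set check with an assumed admin-style requirement, using the credential fields copied from the log line.

```python
# Assumed requirement for illustration only; the real rule lives in policy files.
REQUIRED_ROLES = {"admin"}

credentials = {
    "user_id": "4e30c52618c445e0b1c602b71b7a4f01",
    "project_id": "3f5fa72e1aad4470b15e81061d8b4b7d",
    "roles": ["member", "reader"],
}


def check_attach_external_network(creds):
    # Grant only if the caller holds at least one of the required roles.
    return bool(REQUIRED_ROLES & set(creds["roles"]))


print(check_attach_external_network(credentials))  # -> False, as in the log
```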
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 647.719157] env[68217]: DEBUG oslo_concurrency.lockutils [req-08d50fb9-5e81-49c4-bd66-7b71ae4e57a4 req-8e7255ea-bba7-4f53-8227-bddf056b369a service nova] Acquiring lock "refresh_cache-db4cf157-9511-423c-aa41-433af8d92b48" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.719157] env[68217]: DEBUG oslo_concurrency.lockutils [req-08d50fb9-5e81-49c4-bd66-7b71ae4e57a4 req-8e7255ea-bba7-4f53-8227-bddf056b369a service nova] Acquired lock "refresh_cache-db4cf157-9511-423c-aa41-433af8d92b48" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 647.719157] env[68217]: DEBUG nova.network.neutron [req-08d50fb9-5e81-49c4-bd66-7b71ae4e57a4 req-8e7255ea-bba7-4f53-8227-bddf056b369a service nova] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Refreshing network info cache for port 8fbb4d07-34f6-4f5c-8057-fd5de704aba2 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 647.739099] env[68217]: DEBUG nova.network.neutron [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Successfully created port: a26f19ed-bde8-4dee-bd59-7a0ed95f7659 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 647.821918] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960771, 'name': Rename_Task, 'duration_secs': 0.173808} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 647.822211] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 647.826039] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4f287cbc-6437-409e-9481-0b5acdac6c90 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.834103] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 647.834103] env[68217]: value = "task-2960772" [ 647.834103] env[68217]: _type = "Task" [ 647.834103] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 647.841139] env[68217]: DEBUG nova.network.neutron [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Updating instance_info_cache with network_info: [{"id": "e7076bf7-0c58-414f-ad86-6ae8c8b8cc96", "address": "fa:16:3e:21:16:f6", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7076bf7-0c", "ovs_interfaceid": "e7076bf7-0c58-414f-ad86-6ae8c8b8cc96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.853764] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960772, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 647.866983] env[68217]: DEBUG nova.compute.manager [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Start building block device mappings for instance. 
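The instance_info_cache blob above is a list of VIF dictionaries. A short snippet pulling out the fields most code paths read (port id, MAC, devname, fixed IPs); the literal below is a trimmed copy of the entry in the log, keeping only the keys the snippet touches.

```python
# One (trimmed) VIF entry as cached above.
network_info = [{
    "id": "e7076bf7-0c58-414f-ad86-6ae8c8b8cc96",
    "address": "fa:16:3e:21:16:f6",
    "type": "ovs",
    "devname": "tape7076bf7-0c",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.233.0/24",
            "ips": [{"address": "192.168.233.218", "type": "fixed"}],
        }],
    },
}]

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]
                 if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], vif["devname"], fixed_ips)
```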
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 648.025065] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59eb2ad-3509-49b4-8b44-3af21e82f7c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.033162] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bae09ac-6928-4e60-a8e7-21468d39b511 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.068611] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa083cbe-6549-49fc-bfe3-6be4de7f2ea2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.076682] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83bf17d0-68e3-42d1-a9be-54f4a6080b1a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.091338] env[68217]: DEBUG nova.compute.provider_tree [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 648.346802] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Releasing lock "refresh_cache-fcddfd72-a130-4efc-82cb-1fb22d33d684" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 648.347196] env[68217]: DEBUG nova.compute.manager [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Instance network_info: |[{"id": "e7076bf7-0c58-414f-ad86-6ae8c8b8cc96", "address": "fa:16:3e:21:16:f6", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7076bf7-0c", "ovs_interfaceid": "e7076bf7-0c58-414f-ad86-6ae8c8b8cc96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 648.347492] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 
tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960772, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.351295] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:21:16:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e7076bf7-0c58-414f-ad86-6ae8c8b8cc96', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 648.361073] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 648.362532] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 648.362532] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-df762c4a-e878-43f9-9d2e-833046ac3f51 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.388980] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 648.388980] env[68217]: value = "task-2960773" [ 648.388980] env[68217]: _type = "Task" [ 648.388980] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 648.404023] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960773, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 648.595264] env[68217]: DEBUG nova.scheduler.client.report [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 648.639478] env[68217]: DEBUG nova.network.neutron [req-08d50fb9-5e81-49c4-bd66-7b71ae4e57a4 req-8e7255ea-bba7-4f53-8227-bddf056b369a service nova] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Updated VIF entry in instance network info cache for port 8fbb4d07-34f6-4f5c-8057-fd5de704aba2. 
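The "Instance VIF info" entry above shows the VMware-side record that build_virtual_machine derives from the cached Neutron VIF: bridge name, MAC, an OpaqueNetwork reference keyed by the NSX logical-switch id, and the vmxnet3 model. A schematic of that mapping is sketched below; the field names follow the log line, but the function itself is illustrative, not Nova's implementation.

```python
def to_vmware_vif_info(vif, vif_model="vmxnet3"):
    """Map a cached Neutron VIF entry to the VIF info dict logged by
    build_virtual_machine (schematic, field names as in the log above)."""
    details = vif["details"]
    return {
        "network_name": vif["network"]["bridge"],
        "mac_address": vif["address"],
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details["nsx-logical-switch-id"],
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": vif["id"],
        "vif_model": vif_model,
    }


example_vif = {
    "id": "e7076bf7-0c58-414f-ad86-6ae8c8b8cc96",
    "address": "fa:16:3e:21:16:f6",
    "network": {"bridge": "br-int"},
    "details": {"nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf"},
}
print(to_vmware_vif_info(example_vif))
```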
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 648.639795] env[68217]: DEBUG nova.network.neutron [req-08d50fb9-5e81-49c4-bd66-7b71ae4e57a4 req-8e7255ea-bba7-4f53-8227-bddf056b369a service nova] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Updating instance_info_cache with network_info: [{"id": "8fbb4d07-34f6-4f5c-8057-fd5de704aba2", "address": "fa:16:3e:ef:be:20", "network": {"id": "366acb23-46bf-4747-84d7-c9228b0e4d91", "bridge": "br-int", "label": "tempest-ServersTestManualDisk-574329409-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.250", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "dc0671abf27a426d9cee1db45efe4757", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e1c9bb98-73a9-48eb-856e-a541afe9b07b", "external-id": "nsx-vlan-transportzone-755", "segmentation_id": 755, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8fbb4d07-34", "ovs_interfaceid": "8fbb4d07-34f6-4f5c-8057-fd5de704aba2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 648.772340] env[68217]: DEBUG nova.compute.manager [req-26515b79-033d-4701-aed8-894aaff32fd4 req-72760290-1a50-4d72-8443-6dff5d63ad04 service nova] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Received event network-changed-e7076bf7-0c58-414f-ad86-6ae8c8b8cc96 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 648.772607] env[68217]: DEBUG nova.compute.manager [req-26515b79-033d-4701-aed8-894aaff32fd4 req-72760290-1a50-4d72-8443-6dff5d63ad04 service nova] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Refreshing instance network info cache due to event network-changed-e7076bf7-0c58-414f-ad86-6ae8c8b8cc96. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 648.772847] env[68217]: DEBUG oslo_concurrency.lockutils [req-26515b79-033d-4701-aed8-894aaff32fd4 req-72760290-1a50-4d72-8443-6dff5d63ad04 service nova] Acquiring lock "refresh_cache-fcddfd72-a130-4efc-82cb-1fb22d33d684" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.772995] env[68217]: DEBUG oslo_concurrency.lockutils [req-26515b79-033d-4701-aed8-894aaff32fd4 req-72760290-1a50-4d72-8443-6dff5d63ad04 service nova] Acquired lock "refresh_cache-fcddfd72-a130-4efc-82cb-1fb22d33d684" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 648.773163] env[68217]: DEBUG nova.network.neutron [req-26515b79-033d-4701-aed8-894aaff32fd4 req-72760290-1a50-4d72-8443-6dff5d63ad04 service nova] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Refreshing network info cache for port e7076bf7-0c58-414f-ad86-6ae8c8b8cc96 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 648.844585] env[68217]: DEBUG oslo_vmware.api [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960772, 'name': PowerOnVM_Task, 'duration_secs': 0.878394} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.844935] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 648.845605] env[68217]: INFO nova.compute.manager [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Took 5.39 seconds to spawn the instance on the hypervisor. [ 648.845605] env[68217]: DEBUG nova.compute.manager [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 648.846018] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78677974-e83e-42e6-8554-58b1836b86cf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 648.886785] env[68217]: DEBUG nova.compute.manager [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 648.903289] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960773, 'name': CreateVM_Task, 'duration_secs': 0.408059} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 648.903470] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 648.904129] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.904290] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 648.904644] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 648.905198] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f7ba7bc-5ded-4ef3-8686-51cf1fc8f880 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.606975] env[68217]: DEBUG nova.network.neutron [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Successfully updated port: a26f19ed-bde8-4dee-bd59-7a0ed95f7659 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 649.610065] env[68217]: DEBUG nova.virt.hardware [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 649.610304] env[68217]: DEBUG nova.virt.hardware [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Flavor limits 0:0:0 
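The virt.hardware lines that follow walk from the m1.nano flavor (1 vCPU, no topology constraints, limits defaulting to 65536) to a single possible topology of 1 socket, 1 core, 1 thread. A toy enumeration of that step is shown below; Nova's real code also weighs flavor/image preferences and orderings, so this is only the core idea.

```python
from itertools import product


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals the
    vCPU count and which respect the per-dimension limits."""
    topologies = []
    for sockets, cores, threads in product(range(1, vcpus + 1), repeat=3):
        if (sockets * cores * threads == vcpus
                and sockets <= max_sockets
                and cores <= max_cores
                and threads <= max_threads):
            topologies.append((sockets, cores, threads))
    return topologies


print(possible_topologies(1))  # [(1, 1, 1)], matching "Possible topologies"
```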
{{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 649.610473] env[68217]: DEBUG nova.virt.hardware [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 649.610658] env[68217]: DEBUG nova.virt.hardware [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 649.610801] env[68217]: DEBUG nova.virt.hardware [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 649.610942] env[68217]: DEBUG nova.virt.hardware [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 649.611196] env[68217]: DEBUG nova.virt.hardware [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 649.611371] env[68217]: DEBUG nova.virt.hardware [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 649.611561] env[68217]: DEBUG nova.virt.hardware [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 649.611729] env[68217]: DEBUG nova.virt.hardware [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 649.611909] env[68217]: DEBUG nova.virt.hardware [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 649.613914] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.761s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 649.616147] env[68217]: DEBUG oslo_concurrency.lockutils [req-08d50fb9-5e81-49c4-bd66-7b71ae4e57a4 req-8e7255ea-bba7-4f53-8227-bddf056b369a service nova] Releasing lock "refresh_cache-db4cf157-9511-423c-aa41-433af8d92b48" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 649.622202] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36234748-6490-4ff5-b188-62c9576d3538 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.625173] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 649.625173] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522c00f3-2303-100e-f3d6-e62f88359c0e" [ 649.625173] env[68217]: _type = "Task" [ 649.625173] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.627709] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 33.347s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.627709] env[68217]: DEBUG nova.objects.instance [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Lazy-loading 'resources' on Instance uuid 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 649.629705] env[68217]: INFO nova.compute.manager [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Took 38.67 seconds to build instance. [ 649.640120] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd67fd6-6caa-4cbe-97b4-37d340a621e0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.647444] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522c00f3-2303-100e-f3d6-e62f88359c0e, 'name': SearchDatastore_Task, 'duration_secs': 0.010831} completed successfully. 
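The SearchDatastore_Task / MakeDirectory / CopyVirtualDisk_Task sequence around here is the per-image datastore cache check: look for the cached VMDK under devstack-image-cache_base, create the folder if it is missing, then copy the cached disk into the instance's own directory. A file-system stand-in for that decision flow (not the vSphere API, and the Glance download is reduced to a placeholder write) might look like:

```python
import shutil
from pathlib import Path


def ensure_instance_disk(cache_dir: Path, image_id: str, instance_dir: Path):
    """Reuse the cached base image if present, otherwise create a placeholder
    (real Nova would fetch it from Glance), then copy it for the instance."""
    cached = cache_dir / image_id / f"{image_id}.vmdk"
    cache_dir.mkdir(parents=True, exist_ok=True)       # "Creating directory ..."
    if not cached.exists():                            # SearchDatastore_Task miss
        cached.parent.mkdir(parents=True, exist_ok=True)
        cached.write_bytes(b"")                        # placeholder for a download
    instance_dir.mkdir(parents=True, exist_ok=True)
    target = instance_dir / f"{instance_dir.name}.vmdk"
    shutil.copy(cached, target)                        # CopyVirtualDisk_Task
    return target


print(ensure_instance_disk(Path("/tmp/devstack-image-cache_base"),
                           "575ba628-84b6-4b0c-98ba-305166627d10",
                           Path("/tmp/fcddfd72-a130-4efc-82cb-1fb22d33d684")))
```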
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 649.648185] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 649.648476] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 649.648724] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.648861] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 649.649352] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 649.650098] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-add990fb-c101-4be8-a21d-c86890962790 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.660293] env[68217]: INFO nova.scheduler.client.report [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Deleted allocations for instance bbd282ea-58aa-47b8-aa82-283a55ac1b29 [ 649.672872] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 649.673029] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 649.673879] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-281b8e06-0ef7-444f-b010-0e7dca31f26c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.682951] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 649.682951] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d82f3a-7f88-9980-689e-972a00a12c29" [ 649.682951] env[68217]: _type = "Task" [ 649.682951] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 649.691532] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d82f3a-7f88-9980-689e-972a00a12c29, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 649.807115] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Acquiring lock "0552d616-a406-4dfa-8a70-82f39fb98bbc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 649.807310] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Lock "0552d616-a406-4dfa-8a70-82f39fb98bbc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 649.867109] env[68217]: DEBUG nova.network.neutron [req-26515b79-033d-4701-aed8-894aaff32fd4 req-72760290-1a50-4d72-8443-6dff5d63ad04 service nova] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Updated VIF entry in instance network info cache for port e7076bf7-0c58-414f-ad86-6ae8c8b8cc96. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 649.867514] env[68217]: DEBUG nova.network.neutron [req-26515b79-033d-4701-aed8-894aaff32fd4 req-72760290-1a50-4d72-8443-6dff5d63ad04 service nova] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Updating instance_info_cache with network_info: [{"id": "e7076bf7-0c58-414f-ad86-6ae8c8b8cc96", "address": "fa:16:3e:21:16:f6", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.218", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape7076bf7-0c", "ovs_interfaceid": "e7076bf7-0c58-414f-ad86-6ae8c8b8cc96", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.128435] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Acquiring lock "refresh_cache-f748cf37-6605-49a2-a418-51667a0fac4a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.129131] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Acquired lock "refresh_cache-f748cf37-6605-49a2-a418-51667a0fac4a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 650.129131] env[68217]: DEBUG nova.network.neutron [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 650.138081] env[68217]: DEBUG oslo_concurrency.lockutils [None req-56750ab8-44dc-4b43-a61e-da9e07682569 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Lock "17bea068-7d7a-4a87-8b27-91a7efcd45c5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.084s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.169386] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3f80b74a-20b8-4a1f-9902-1a00ac6e35ae tempest-InstanceActionsV221TestJSON-830674348 tempest-InstanceActionsV221TestJSON-830674348-project-member] Lock "bbd282ea-58aa-47b8-aa82-283a55ac1b29" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 38.654s 
{{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.197193] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d82f3a-7f88-9980-689e-972a00a12c29, 'name': SearchDatastore_Task, 'duration_secs': 0.01033} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.200646] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4c71ff4d-fda7-486b-b137-79dde0c5366e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.207052] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 650.207052] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52bedbaa-6bfb-0ca6-d25d-5398f4ba01ad" [ 650.207052] env[68217]: _type = "Task" [ 650.207052] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.218867] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52bedbaa-6bfb-0ca6-d25d-5398f4ba01ad, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.267864] env[68217]: INFO nova.compute.manager [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Rebuilding instance [ 650.310284] env[68217]: DEBUG nova.compute.manager [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 650.311144] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046492e5-f9fe-4640-a62a-70eccae2e025 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.369861] env[68217]: DEBUG oslo_concurrency.lockutils [req-26515b79-033d-4701-aed8-894aaff32fd4 req-72760290-1a50-4d72-8443-6dff5d63ad04 service nova] Releasing lock "refresh_cache-fcddfd72-a130-4efc-82cb-1fb22d33d684" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.642545] env[68217]: DEBUG nova.compute.manager [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 650.687398] env[68217]: DEBUG nova.network.neutron [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 650.705050] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43380d4-3ed4-464c-a5e1-bdee42537ec2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.721950] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8485cd71-2182-4620-8766-938ef6eeccd8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.725495] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52bedbaa-6bfb-0ca6-d25d-5398f4ba01ad, 'name': SearchDatastore_Task, 'duration_secs': 0.0114} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 650.725696] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 650.725956] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] fcddfd72-a130-4efc-82cb-1fb22d33d684/fcddfd72-a130-4efc-82cb-1fb22d33d684.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 650.726575] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1fe98fac-d3a3-4937-91e5-e0d7f128710e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.761013] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b52361f9-1aa0-4203-b5af-c08680a39738 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.765357] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 650.765357] env[68217]: value = "task-2960774" [ 650.765357] env[68217]: _type = "Task" [ 650.765357] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 650.773712] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-853638f0-4415-44cc-b87d-15194861fedf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.780881] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960774, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 650.794676] env[68217]: DEBUG nova.compute.provider_tree [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 650.824462] env[68217]: DEBUG nova.compute.manager [req-3899b085-52be-42fd-a2fb-8ca2b1867941 req-3c149cdd-5385-4049-a3fc-c5e86edc6e71 service nova] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Received event network-vif-plugged-a26f19ed-bde8-4dee-bd59-7a0ed95f7659 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 650.824560] env[68217]: DEBUG oslo_concurrency.lockutils [req-3899b085-52be-42fd-a2fb-8ca2b1867941 req-3c149cdd-5385-4049-a3fc-c5e86edc6e71 service nova] Acquiring lock "f748cf37-6605-49a2-a418-51667a0fac4a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 650.824757] env[68217]: DEBUG oslo_concurrency.lockutils [req-3899b085-52be-42fd-a2fb-8ca2b1867941 req-3c149cdd-5385-4049-a3fc-c5e86edc6e71 service nova] Lock "f748cf37-6605-49a2-a418-51667a0fac4a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 650.824927] env[68217]: DEBUG oslo_concurrency.lockutils [req-3899b085-52be-42fd-a2fb-8ca2b1867941 req-3c149cdd-5385-4049-a3fc-c5e86edc6e71 service nova] Lock "f748cf37-6605-49a2-a418-51667a0fac4a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 650.825192] env[68217]: DEBUG nova.compute.manager [req-3899b085-52be-42fd-a2fb-8ca2b1867941 req-3c149cdd-5385-4049-a3fc-c5e86edc6e71 service nova] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] No waiting events found dispatching network-vif-plugged-a26f19ed-bde8-4dee-bd59-7a0ed95f7659 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 650.825393] env[68217]: WARNING nova.compute.manager [req-3899b085-52be-42fd-a2fb-8ca2b1867941 req-3c149cdd-5385-4049-a3fc-c5e86edc6e71 service nova] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Received unexpected event network-vif-plugged-a26f19ed-bde8-4dee-bd59-7a0ed95f7659 for instance with vm_state building and task_state spawning. 
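The event entries above ("Acquiring lock ...-events", "No waiting events found dispatching ...", "Received unexpected event ...") reflect the per-instance external-event table: a waiter registers interest in an event name, the Neutron-driven external event pops and signals it, and an event nobody registered for is only logged as unexpected. A much-simplified sketch of that registry, using a plain threading.Event per waiter, is below; class and method names are illustrative.

```python
import threading


class InstanceEvents:
    """Simplified per-instance event table (see the '-events' locks above)."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}  # (instance_uuid, event_name) -> threading.Event

    def prepare(self, instance_uuid, event_name):
        # Called by the build path before it starts waiting for the event.
        waiter = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_name)] = waiter
        return waiter

    def pop(self, instance_uuid, event_name):
        # Called when the external event arrives from Neutron.
        with self._lock:
            waiter = self._waiters.pop((instance_uuid, event_name), None)
        if waiter is None:
            print(f"Received unexpected event {event_name}")  # WARNING path
            return False
        waiter.set()
        return True


events = InstanceEvents()
# No one registered for this port's vif-plugged event, so it is "unexpected":
events.pop("f748cf37", "network-vif-plugged-a26f19ed")
```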
[ 650.825559] env[68217]: DEBUG nova.compute.manager [req-3899b085-52be-42fd-a2fb-8ca2b1867941 req-3c149cdd-5385-4049-a3fc-c5e86edc6e71 service nova] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Received event network-changed-a26f19ed-bde8-4dee-bd59-7a0ed95f7659 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 650.825706] env[68217]: DEBUG nova.compute.manager [req-3899b085-52be-42fd-a2fb-8ca2b1867941 req-3c149cdd-5385-4049-a3fc-c5e86edc6e71 service nova] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Refreshing instance network info cache due to event network-changed-a26f19ed-bde8-4dee-bd59-7a0ed95f7659. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 650.825868] env[68217]: DEBUG oslo_concurrency.lockutils [req-3899b085-52be-42fd-a2fb-8ca2b1867941 req-3c149cdd-5385-4049-a3fc-c5e86edc6e71 service nova] Acquiring lock "refresh_cache-f748cf37-6605-49a2-a418-51667a0fac4a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 650.959780] env[68217]: DEBUG nova.network.neutron [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Updating instance_info_cache with network_info: [{"id": "a26f19ed-bde8-4dee-bd59-7a0ed95f7659", "address": "fa:16:3e:de:54:d9", "network": {"id": "eab0483d-ef9b-44e9-b646-ddd8f146d29c", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1099907994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f5fa72e1aad4470b15e81061d8b4b7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c7eaa8-06f3-40c3-93ae-7593486eb870", "external-id": "nsx-vlan-transportzone-20", "segmentation_id": 20, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa26f19ed-bd", "ovs_interfaceid": "a26f19ed-bde8-4dee-bd59-7a0ed95f7659", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.168027] env[68217]: DEBUG oslo_concurrency.lockutils [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 651.280183] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960774, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.481309} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.280183] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] fcddfd72-a130-4efc-82cb-1fb22d33d684/fcddfd72-a130-4efc-82cb-1fb22d33d684.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 651.280183] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 651.280183] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d48c5d49-17ab-4a2c-aaea-b9707e43e6e4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.290160] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 651.290160] env[68217]: value = "task-2960775" [ 651.290160] env[68217]: _type = "Task" [ 651.290160] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.298482] env[68217]: DEBUG nova.scheduler.client.report [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 651.308028] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960775, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.337973] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 651.338286] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b794baf8-866b-42c2-9411-998a97f7ece8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.348685] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 651.348685] env[68217]: value = "task-2960776" [ 651.348685] env[68217]: _type = "Task" [ 651.348685] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.361152] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960776, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.463652] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Releasing lock "refresh_cache-f748cf37-6605-49a2-a418-51667a0fac4a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 651.464014] env[68217]: DEBUG nova.compute.manager [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Instance network_info: |[{"id": "a26f19ed-bde8-4dee-bd59-7a0ed95f7659", "address": "fa:16:3e:de:54:d9", "network": {"id": "eab0483d-ef9b-44e9-b646-ddd8f146d29c", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1099907994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f5fa72e1aad4470b15e81061d8b4b7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c7eaa8-06f3-40c3-93ae-7593486eb870", "external-id": "nsx-vlan-transportzone-20", "segmentation_id": 20, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa26f19ed-bd", "ovs_interfaceid": "a26f19ed-bde8-4dee-bd59-7a0ed95f7659", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 651.464475] env[68217]: DEBUG oslo_concurrency.lockutils [req-3899b085-52be-42fd-a2fb-8ca2b1867941 
req-3c149cdd-5385-4049-a3fc-c5e86edc6e71 service nova] Acquired lock "refresh_cache-f748cf37-6605-49a2-a418-51667a0fac4a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 651.464739] env[68217]: DEBUG nova.network.neutron [req-3899b085-52be-42fd-a2fb-8ca2b1867941 req-3c149cdd-5385-4049-a3fc-c5e86edc6e71 service nova] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Refreshing network info cache for port a26f19ed-bde8-4dee-bd59-7a0ed95f7659 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 651.467326] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:54:d9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0c7eaa8-06f3-40c3-93ae-7593486eb870', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a26f19ed-bde8-4dee-bd59-7a0ed95f7659', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 651.475744] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Creating folder: Project (3f5fa72e1aad4470b15e81061d8b4b7d). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 651.479623] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-39ca2d53-1fd2-4ba4-9c79-0e704357e684 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.494849] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Created folder: Project (3f5fa72e1aad4470b15e81061d8b4b7d) in parent group-v594094. [ 651.495271] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Creating folder: Instances. Parent ref: group-v594161. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 651.495885] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bd4f0337-3b42-4734-928c-44a19cc4ffac {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.512814] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Created folder: Instances in parent group-v594161. [ 651.512814] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 651.512814] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 651.512814] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f3128e7-9d32-4924-9734-7e9ababc27ec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.536979] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 651.536979] env[68217]: value = "task-2960779" [ 651.536979] env[68217]: _type = "Task" [ 651.536979] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.547227] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960779, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.801290] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960775, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07539} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.801662] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 651.802499] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c14717-5e8c-485e-8e3f-372965664893 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.819706] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.192s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 651.831612] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Reconfiguring VM instance instance-00000018 to attach disk [datastore2] fcddfd72-a130-4efc-82cb-1fb22d33d684/fcddfd72-a130-4efc-82cb-1fb22d33d684.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 651.832875] env[68217]: DEBUG nova.network.neutron [req-3899b085-52be-42fd-a2fb-8ca2b1867941 req-3c149cdd-5385-4049-a3fc-c5e86edc6e71 service nova] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Updated VIF entry in instance network info cache for port a26f19ed-bde8-4dee-bd59-7a0ed95f7659. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 651.833330] env[68217]: DEBUG nova.network.neutron [req-3899b085-52be-42fd-a2fb-8ca2b1867941 req-3c149cdd-5385-4049-a3fc-c5e86edc6e71 service nova] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Updating instance_info_cache with network_info: [{"id": "a26f19ed-bde8-4dee-bd59-7a0ed95f7659", "address": "fa:16:3e:de:54:d9", "network": {"id": "eab0483d-ef9b-44e9-b646-ddd8f146d29c", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-1099907994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3f5fa72e1aad4470b15e81061d8b4b7d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0c7eaa8-06f3-40c3-93ae-7593486eb870", "external-id": "nsx-vlan-transportzone-20", "segmentation_id": 20, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa26f19ed-bd", "ovs_interfaceid": "a26f19ed-bde8-4dee-bd59-7a0ed95f7659", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.834815] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.022s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 651.835226] env[68217]: DEBUG nova.objects.instance [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Lazy-loading 'resources' on Instance uuid ae5fa3f4-e487-40ed-9ca4-12a6f9713eba {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 651.836411] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6f6dbce-0288-4874-a452-ce4eb7c5d230 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.862097] env[68217]: INFO nova.scheduler.client.report [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Deleted allocations for instance 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5 [ 651.869906] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 651.869906] env[68217]: value = "task-2960780" [ 651.869906] env[68217]: _type = "Task" [ 651.869906] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.877568] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960776, 'name': PowerOffVM_Task, 'duration_secs': 0.208462} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 651.877568] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 651.877688] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 651.878506] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5126ff58-948e-4e85-90ba-be9e5d52c4cb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.885555] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960780, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 651.889979] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 651.890153] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c7b394eb-7831-4d19-9974-47582a52a151 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.927577] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 651.927797] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 651.927979] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Deleting the datastore file [datastore2] 17bea068-7d7a-4a87-8b27-91a7efcd45c5 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 651.928592] 
env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e1608fed-8525-4363-8627-8c0ed6cc5274 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 651.939971] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 651.939971] env[68217]: value = "task-2960782" [ 651.939971] env[68217]: _type = "Task" [ 651.939971] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 651.950025] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960782, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.050248] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960779, 'name': CreateVM_Task, 'duration_secs': 0.409354} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.050605] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 652.051506] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.051506] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 652.051848] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 652.051961] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03aff360-6c80-4d9e-bb2c-3ab2a9140230 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.058019] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the task: (returnval){ [ 652.058019] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522b7be3-f101-1aee-2ace-4d25503935e6" [ 652.058019] env[68217]: _type = "Task" [ 652.058019] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.067429] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522b7be3-f101-1aee-2ace-4d25503935e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.360802] env[68217]: DEBUG oslo_concurrency.lockutils [req-3899b085-52be-42fd-a2fb-8ca2b1867941 req-3c149cdd-5385-4049-a3fc-c5e86edc6e71 service nova] Releasing lock "refresh_cache-f748cf37-6605-49a2-a418-51667a0fac4a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 652.372925] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f4464596-c9bb-4d18-9861-6ec6a6adaac7 tempest-ServerDiagnosticsV248Test-1809217421 tempest-ServerDiagnosticsV248Test-1809217421-project-member] Lock "0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.509s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 652.380926] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960780, 'name': ReconfigVM_Task, 'duration_secs': 0.3623} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.380926] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Reconfigured VM instance instance-00000018 to attach disk [datastore2] fcddfd72-a130-4efc-82cb-1fb22d33d684/fcddfd72-a130-4efc-82cb-1fb22d33d684.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 652.381548] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-621d3497-8025-41c7-bb3f-5ef0860fadba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.390575] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 652.390575] env[68217]: value = "task-2960783" [ 652.390575] env[68217]: _type = "Task" [ 652.390575] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.410506] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960783, 'name': Rename_Task} progress is 10%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.458657] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960782, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.108828} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.463770] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 652.464157] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 652.464759] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 652.570369] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522b7be3-f101-1aee-2ace-4d25503935e6, 'name': SearchDatastore_Task, 'duration_secs': 0.017524} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.573175] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 652.573502] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 652.573781] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 652.573962] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 652.574187] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 652.574690] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-469c9754-3f48-4006-addf-89b61861eec4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.584744] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 652.585096] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 652.589187] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a4dc1c59-4c83-4ac6-9a69-ca16a097c9da {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.597902] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the task: (returnval){ [ 652.597902] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5238e795-1af2-4b44-21d6-4f2123f36268" [ 652.597902] env[68217]: _type = "Task" [ 652.597902] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.606271] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5238e795-1af2-4b44-21d6-4f2123f36268, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.906857] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960783, 'name': Rename_Task, 'duration_secs': 0.15639} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 652.906857] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 652.906857] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-298b2cb4-a804-4964-8656-51792c3bf9e9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.917620] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 652.917620] env[68217]: value = "task-2960784" [ 652.917620] env[68217]: _type = "Task" [ 652.917620] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 652.926426] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960784, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 652.955883] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dfa1978-8726-4d0a-b4c8-5edc69ac5109 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.966492] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-132ae8e3-98b4-4105-8b13-3ce081dc7584 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.006456] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1dc708e-4187-490d-bde7-f84f93617814 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.015184] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442d1ae5-c450-487f-9c63-0ae2d5d782ed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.031253] env[68217]: DEBUG nova.compute.provider_tree [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 653.109066] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5238e795-1af2-4b44-21d6-4f2123f36268, 'name': SearchDatastore_Task, 'duration_secs': 0.010541} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.109938] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a84e0c8-c8e7-449d-9056-82d2745b7a97 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.117563] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the task: (returnval){ [ 653.117563] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529f031c-43b1-5743-a52d-8913bd87e782" [ 653.117563] env[68217]: _type = "Task" [ 653.117563] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.126901] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529f031c-43b1-5743-a52d-8913bd87e782, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.428862] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960784, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.445528] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 653.445764] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 653.517491] env[68217]: DEBUG nova.virt.hardware [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 653.517729] env[68217]: DEBUG nova.virt.hardware [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 653.517963] env[68217]: DEBUG nova.virt.hardware [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 653.518211] env[68217]: DEBUG nova.virt.hardware [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 653.518398] env[68217]: DEBUG nova.virt.hardware [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 653.518586] env[68217]: DEBUG nova.virt.hardware [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 653.518839] env[68217]: DEBUG nova.virt.hardware [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 653.519044] env[68217]: DEBUG nova.virt.hardware [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 653.519269] env[68217]: DEBUG nova.virt.hardware [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 653.519482] env[68217]: DEBUG nova.virt.hardware [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 653.519703] env[68217]: DEBUG nova.virt.hardware [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 653.521549] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4ae5ad4-1016-49fd-a2ef-f7c9abc3fde5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.534196] env[68217]: DEBUG nova.scheduler.client.report [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 653.540218] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a802a73a-d397-4315-b2f6-107613c8bb52 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.557327] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Instance VIF info [] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 653.564154] env[68217]: 
DEBUG oslo.service.backend.eventlet.loopingcall [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 653.565106] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 653.565384] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c48752e9-67eb-4fde-ba2a-0f34e46b9066 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.588095] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 653.588095] env[68217]: value = "task-2960785" [ 653.588095] env[68217]: _type = "Task" [ 653.588095] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.595746] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960785, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.630554] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529f031c-43b1-5743-a52d-8913bd87e782, 'name': SearchDatastore_Task, 'duration_secs': 0.012376} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.630827] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 653.631086] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] f748cf37-6605-49a2-a418-51667a0fac4a/f748cf37-6605-49a2-a418-51667a0fac4a.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 653.631341] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3631a65d-3a5a-4fd0-9c08-8a5c8e89712e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.640505] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the task: (returnval){ [ 653.640505] env[68217]: value = "task-2960786" [ 653.640505] env[68217]: _type = "Task" [ 653.640505] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 653.648928] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960786, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 653.930168] env[68217]: DEBUG oslo_vmware.api [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960784, 'name': PowerOnVM_Task, 'duration_secs': 0.520182} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 653.930439] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 653.930708] env[68217]: INFO nova.compute.manager [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Took 7.77 seconds to spawn the instance on the hypervisor. [ 653.931017] env[68217]: DEBUG nova.compute.manager [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 653.932079] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-519388de-940e-41cc-85f8-dd8f29dadc52 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 653.956016] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 653.956623] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 653.957030] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 653.957363] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 653.957821] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_volume_usage 
{{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 653.957995] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 653.958227] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68217) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 653.958605] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 654.046556] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.211s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 654.050712] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.035s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 654.053361] env[68217]: INFO nova.compute.claims [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 654.087216] env[68217]: INFO nova.scheduler.client.report [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Deleted allocations for instance ae5fa3f4-e487-40ed-9ca4-12a6f9713eba [ 654.101571] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960785, 'name': CreateVM_Task, 'duration_secs': 0.319058} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.101571] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 654.101571] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.102038] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.102364] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 654.102634] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cbd8757-917c-4f92-a58a-30b1dba25c37 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.109397] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 654.109397] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527e9aa1-04b5-994e-6463-bf04b57133c0" [ 654.109397] env[68217]: _type = "Task" [ 654.109397] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.119724] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527e9aa1-04b5-994e-6463-bf04b57133c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.152073] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960786, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.451445] env[68217]: INFO nova.compute.manager [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Took 42.97 seconds to build instance. 
[ 654.464065] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 654.599249] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8fc4b-ef46-4627-8c67-fc3b8c8e1f8c tempest-ServersListShow2100Test-1688877143 tempest-ServersListShow2100Test-1688877143-project-member] Lock "ae5fa3f4-e487-40ed-9ca4-12a6f9713eba" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 42.140s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 654.620705] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527e9aa1-04b5-994e-6463-bf04b57133c0, 'name': SearchDatastore_Task, 'duration_secs': 0.058262} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.621020] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 654.621974] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 654.622259] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 654.622405] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 654.622581] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 654.622841] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b7f3dc8f-c862-4961-8b2b-28560bab01c5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.641566] env[68217]: 
DEBUG nova.virt.vmwareapi.ds_util [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 654.641717] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 654.645736] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da4800bf-e85d-42d6-a066-5efffd4ba6a6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.652573] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 654.652573] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5283dbd3-bef0-e4ac-9d9b-961a5b2f3867" [ 654.652573] env[68217]: _type = "Task" [ 654.652573] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.655830] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960786, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584129} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 654.659222] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] f748cf37-6605-49a2-a418-51667a0fac4a/f748cf37-6605-49a2-a418-51667a0fac4a.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 654.659515] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 654.659767] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-38aee2fe-ccb9-4345-a4d7-b2e7eac09242 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.667434] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5283dbd3-bef0-e4ac-9d9b-961a5b2f3867, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.668713] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the task: (returnval){ [ 654.668713] env[68217]: value = "task-2960787" [ 654.668713] env[68217]: _type = "Task" [ 654.668713] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 654.679383] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960787, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 654.958037] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f0b02cf9-c857-47cb-b5bb-5f21a8885c31 tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "fcddfd72-a130-4efc-82cb-1fb22d33d684" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 63.945s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 655.171272] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5283dbd3-bef0-e4ac-9d9b-961a5b2f3867, 'name': SearchDatastore_Task, 'duration_secs': 0.031574} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.178031] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e021ff1-f883-48bd-a69c-3bcc32c7f9d9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.188026] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960787, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069869} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.188787] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 655.189758] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 655.189758] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]523526e7-5e2e-b56a-1b28-d0a1c2460ece" [ 655.189758] env[68217]: _type = "Task" [ 655.189758] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.190110] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74bc9e60-30e4-4195-aad7-191dc6479b48 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.205362] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523526e7-5e2e-b56a-1b28-d0a1c2460ece, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.227114] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Reconfiguring VM instance instance-00000019 to attach disk [datastore2] f748cf37-6605-49a2-a418-51667a0fac4a/f748cf37-6605-49a2-a418-51667a0fac4a.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 655.229954] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-136fefbd-2786-4cd4-866c-65c274a9f550 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.253975] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the task: (returnval){ [ 655.253975] env[68217]: value = "task-2960788" [ 655.253975] env[68217]: _type = "Task" [ 655.253975] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.266074] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960788, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.382088] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3697c76d-d27c-46f7-b2e0-1fcc4975a268 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.391960] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-765fc552-8679-4c94-9819-5bfac11d4073 tempest-ServersAdminNegativeTestJSON-1478365920 tempest-ServersAdminNegativeTestJSON-1478365920-project-admin] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Suspending the VM {{(pid=68217) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 655.392436] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-464c4238-42a0-4d71-b2cd-b08bca96ffb0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.400623] env[68217]: DEBUG oslo_vmware.api [None req-765fc552-8679-4c94-9819-5bfac11d4073 tempest-ServersAdminNegativeTestJSON-1478365920 tempest-ServersAdminNegativeTestJSON-1478365920-project-admin] Waiting for the task: (returnval){ [ 655.400623] env[68217]: value = "task-2960789" [ 655.400623] env[68217]: _type = "Task" [ 655.400623] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.409232] env[68217]: DEBUG oslo_vmware.api [None req-765fc552-8679-4c94-9819-5bfac11d4073 tempest-ServersAdminNegativeTestJSON-1478365920 tempest-ServersAdminNegativeTestJSON-1478365920-project-admin] Task: {'id': task-2960789, 'name': SuspendVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.459507] env[68217]: DEBUG nova.compute.manager [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 655.695335] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b3c4df-cc05-4fcf-8e94-b21f6f6c4f33 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.713862] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523526e7-5e2e-b56a-1b28-d0a1c2460ece, 'name': SearchDatastore_Task, 'duration_secs': 0.05295} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 655.714878] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c6de5c-afc3-46cb-b400-286e076ac1e3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.719073] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 655.719378] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 17bea068-7d7a-4a87-8b27-91a7efcd45c5/17bea068-7d7a-4a87-8b27-91a7efcd45c5.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 655.719671] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-08c63e91-da5f-4daa-934f-c45429a5dcba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.755020] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37a2653d-f123-4495-b20c-efb410078dae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.755020] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 655.755020] env[68217]: value = "task-2960790" [ 655.755020] env[68217]: _type = "Task" [ 655.755020] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 655.764844] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3aa5eb8-ffa6-4102-8b2a-6cc3edd2fa3c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 655.772753] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960790, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.785095] env[68217]: DEBUG nova.compute.provider_tree [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 655.789261] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960788, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.911615] env[68217]: DEBUG oslo_vmware.api [None req-765fc552-8679-4c94-9819-5bfac11d4073 tempest-ServersAdminNegativeTestJSON-1478365920 tempest-ServersAdminNegativeTestJSON-1478365920-project-admin] Task: {'id': task-2960789, 'name': SuspendVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 655.983782] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 656.267475] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960790, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491143} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.268155] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 17bea068-7d7a-4a87-8b27-91a7efcd45c5/17bea068-7d7a-4a87-8b27-91a7efcd45c5.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 656.268427] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 656.268668] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a0979123-68d7-402b-956e-b8789c925d3e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.276021] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960788, 'name': ReconfigVM_Task, 'duration_secs': 0.595012} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.276021] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Reconfigured VM instance instance-00000019 to attach disk [datastore2] f748cf37-6605-49a2-a418-51667a0fac4a/f748cf37-6605-49a2-a418-51667a0fac4a.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 656.276021] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-65882580-8732-40ee-8d8c-9c166dc7a9df {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.282118] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 656.282118] env[68217]: value = "task-2960791" [ 656.282118] env[68217]: _type = "Task" [ 656.282118] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.283973] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the task: (returnval){ [ 656.283973] env[68217]: value = "task-2960792" [ 656.283973] env[68217]: _type = "Task" [ 656.283973] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.293188] env[68217]: DEBUG nova.scheduler.client.report [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 656.294798] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960791, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.309197] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960792, 'name': Rename_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.411440] env[68217]: DEBUG oslo_vmware.api [None req-765fc552-8679-4c94-9819-5bfac11d4073 tempest-ServersAdminNegativeTestJSON-1478365920 tempest-ServersAdminNegativeTestJSON-1478365920-project-admin] Task: {'id': task-2960789, 'name': SuspendVM_Task, 'duration_secs': 0.878555} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.411995] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-765fc552-8679-4c94-9819-5bfac11d4073 tempest-ServersAdminNegativeTestJSON-1478365920 tempest-ServersAdminNegativeTestJSON-1478365920-project-admin] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Suspended the VM {{(pid=68217) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 656.411995] env[68217]: DEBUG nova.compute.manager [None req-765fc552-8679-4c94-9819-5bfac11d4073 tempest-ServersAdminNegativeTestJSON-1478365920 tempest-ServersAdminNegativeTestJSON-1478365920-project-admin] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 656.412665] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1c4d73-c0c4-4b4e-9fc7-d5087340998e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.791212] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960791, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.314346} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.791815] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 656.792628] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d457f57a-53fe-43f2-915d-ac062747c371 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.798137] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.748s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 656.798610] env[68217]: DEBUG nova.compute.manager [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 656.801101] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960792, 'name': Rename_Task, 'duration_secs': 0.375631} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 656.801634] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 33.401s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 656.801810] env[68217]: DEBUG nova.objects.instance [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68217) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 656.805096] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 656.820597] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8fbc21df-e6e6-43e6-80cb-665c18977fe3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.834025] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] 17bea068-7d7a-4a87-8b27-91a7efcd45c5/17bea068-7d7a-4a87-8b27-91a7efcd45c5.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 656.834642] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ec30aca1-1944-426c-872d-1ca0593502f8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.860112] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the task: (returnval){ [ 656.860112] env[68217]: value = "task-2960793" [ 656.860112] env[68217]: _type = "Task" [ 656.860112] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.861801] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 656.861801] env[68217]: value = "task-2960794" [ 656.861801] env[68217]: _type = "Task" [ 656.861801] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 656.876030] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960793, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 656.880670] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960794, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.323682] env[68217]: DEBUG nova.compute.utils [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 657.326213] env[68217]: DEBUG nova.compute.manager [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 657.326213] env[68217]: DEBUG nova.network.neutron [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 657.367655] env[68217]: DEBUG nova.policy [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eb52491e8627451cb456c8cceb436221', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4a4a74a76c934c8db7890b352f35c31a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 657.378854] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960793, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.382089] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960794, 'name': ReconfigVM_Task, 'duration_secs': 0.352814} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.382361] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Reconfigured VM instance instance-00000017 to attach disk [datastore2] 17bea068-7d7a-4a87-8b27-91a7efcd45c5/17bea068-7d7a-4a87-8b27-91a7efcd45c5.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 657.382956] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-af86b8e2-b7d3-4b21-9cfd-40e009db4311 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.390313] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 657.390313] env[68217]: value = "task-2960795" [ 657.390313] env[68217]: _type = "Task" [ 657.390313] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.399523] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960795, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 657.527843] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "d14026b1-84dd-430e-be94-94dcb1f47473" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 657.528283] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "d14026b1-84dd-430e-be94-94dcb1f47473" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.690700] env[68217]: DEBUG nova.network.neutron [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Successfully created port: 67b65a99-8a30-4343-a909-d1c4b9b602fc {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 657.828018] env[68217]: DEBUG nova.compute.manager [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 657.842898] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65f087bd-0236-4e28-8867-0d23862da758 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.041s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 657.844135] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.024s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 657.845936] env[68217]: INFO nova.compute.claims [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 657.873170] env[68217]: DEBUG oslo_vmware.api [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960793, 'name': PowerOnVM_Task, 'duration_secs': 0.525255} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.873430] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 657.873620] env[68217]: INFO nova.compute.manager [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Took 8.99 seconds to spawn the instance on the hypervisor. [ 657.873792] env[68217]: DEBUG nova.compute.manager [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 657.874586] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20f1a2a-78d7-4045-a129-9abe3c316a16 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.901207] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960795, 'name': Rename_Task, 'duration_secs': 0.149743} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 657.901207] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 657.901389] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2c5801e-613e-4cbe-8833-fb514d9022b6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.908967] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 657.908967] env[68217]: value = "task-2960796" [ 657.908967] env[68217]: _type = "Task" [ 657.908967] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 657.917646] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960796, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.391237] env[68217]: INFO nova.compute.manager [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Took 45.14 seconds to build instance. [ 658.419954] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960796, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 658.838215] env[68217]: DEBUG nova.compute.manager [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 658.860559] env[68217]: DEBUG nova.virt.hardware [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 658.860829] env[68217]: DEBUG nova.virt.hardware [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 658.860988] env[68217]: DEBUG nova.virt.hardware [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 658.861179] env[68217]: DEBUG nova.virt.hardware [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 658.861322] env[68217]: DEBUG nova.virt.hardware [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 658.861468] env[68217]: DEBUG nova.virt.hardware [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 658.861761] env[68217]: DEBUG nova.virt.hardware [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 658.862046] env[68217]: DEBUG nova.virt.hardware [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 658.862250] env[68217]: DEBUG nova.virt.hardware [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 658.862425] env[68217]: DEBUG nova.virt.hardware [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 658.862590] env[68217]: DEBUG nova.virt.hardware [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 658.863535] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d57ff6a-f161-43a4-b1c8-7a39f4f66496 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.872738] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cadaa466-4974-4d4f-b71b-23d3ef430681 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.892798] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ad73f499-ab69-413f-8e69-4bf1e1cb5ce2 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Lock "f748cf37-6605-49a2-a418-51667a0fac4a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 64.523s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 658.922994] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960796, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.114055] env[68217]: DEBUG nova.compute.manager [req-d9a793fc-c095-4085-87cb-89bd8ac8f231 req-d8a33e0c-5efe-4d80-bcf7-1d990f912e12 service nova] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Received event network-vif-plugged-67b65a99-8a30-4343-a909-d1c4b9b602fc {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 659.114284] env[68217]: DEBUG oslo_concurrency.lockutils [req-d9a793fc-c095-4085-87cb-89bd8ac8f231 req-d8a33e0c-5efe-4d80-bcf7-1d990f912e12 service nova] Acquiring lock "8fcccac2-dae1-4af0-a2b2-787e1bb7c9be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.114493] env[68217]: DEBUG oslo_concurrency.lockutils [req-d9a793fc-c095-4085-87cb-89bd8ac8f231 req-d8a33e0c-5efe-4d80-bcf7-1d990f912e12 service nova] Lock "8fcccac2-dae1-4af0-a2b2-787e1bb7c9be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.114657] env[68217]: DEBUG oslo_concurrency.lockutils [req-d9a793fc-c095-4085-87cb-89bd8ac8f231 req-d8a33e0c-5efe-4d80-bcf7-1d990f912e12 service nova] Lock "8fcccac2-dae1-4af0-a2b2-787e1bb7c9be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.114823] env[68217]: DEBUG nova.compute.manager [req-d9a793fc-c095-4085-87cb-89bd8ac8f231 req-d8a33e0c-5efe-4d80-bcf7-1d990f912e12 service nova] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] No waiting events found dispatching network-vif-plugged-67b65a99-8a30-4343-a909-d1c4b9b602fc {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 659.114985] env[68217]: WARNING nova.compute.manager [req-d9a793fc-c095-4085-87cb-89bd8ac8f231 req-d8a33e0c-5efe-4d80-bcf7-1d990f912e12 service nova] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Received unexpected event network-vif-plugged-67b65a99-8a30-4343-a909-d1c4b9b602fc for instance with vm_state building and task_state spawning. 
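[annotation] The lock lines throughout this trace ('Lock "..." acquired by "..." :: waited Ns' and '"released" by "..." :: held Ns') are emitted by oslo.concurrency's synchronized wrapper around the critical sections named in the quotes, such as "compute_resources" and the per-instance "-events" locks. A minimal sketch of declaring such a section is shown below; the function body and arguments are illustrative only and are not taken from Nova's source.

```python
from oslo_concurrency import lockutils


# Illustrative only: a process-local lock named like the "compute_resources"
# lock seen in the trace. The wrapper logs (at DEBUG) how long the caller
# waited to acquire the lock and how long it was held once released.
@lockutils.synchronized("compute_resources")
def claim_resources(instance_uuid, flavor):
    # ... update the resource tracker's accounting under the lock ...
    return {"instance": instance_uuid, "vcpus": flavor["vcpus"]}


if __name__ == "__main__":
    print(claim_resources("00000000-0000-0000-0000-000000000000", {"vcpus": 1}))
```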
[ 659.165971] env[68217]: DEBUG nova.network.neutron [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Successfully updated port: 67b65a99-8a30-4343-a909-d1c4b9b602fc {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 659.340202] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35638bc-be18-4586-b966-6abb1e29d390 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.348523] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-724025b3-bcb7-46d8-bcc0-27fabdf85b43 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.382705] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2221b27-fa73-4338-8b40-51a729404a4c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.392354] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce25623-246d-4732-ba81-88dec8993069 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.396352] env[68217]: DEBUG nova.compute.manager [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 659.409017] env[68217]: DEBUG nova.compute.provider_tree [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 659.419110] env[68217]: DEBUG oslo_vmware.api [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960796, 'name': PowerOnVM_Task, 'duration_secs': 1.061052} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 659.419379] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 659.419588] env[68217]: DEBUG nova.compute.manager [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 659.420939] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e20222-3628-4c6b-9426-4b0e118fc584 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.513413] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquiring lock "fcddfd72-a130-4efc-82cb-1fb22d33d684" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.513668] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "fcddfd72-a130-4efc-82cb-1fb22d33d684" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.513874] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquiring lock "fcddfd72-a130-4efc-82cb-1fb22d33d684-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.514065] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "fcddfd72-a130-4efc-82cb-1fb22d33d684-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 659.514240] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "fcddfd72-a130-4efc-82cb-1fb22d33d684-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 659.516330] env[68217]: INFO nova.compute.manager [None req-d10a9c71-0613-4f43-b977-9dcb2775324e 
tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Terminating instance [ 659.558120] env[68217]: DEBUG nova.compute.manager [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 659.559229] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e4c06d0-dba2-427f-83c7-7fbcc21bc841 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.670552] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Acquiring lock "refresh_cache-8fcccac2-dae1-4af0-a2b2-787e1bb7c9be" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 659.670715] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Acquired lock "refresh_cache-8fcccac2-dae1-4af0-a2b2-787e1bb7c9be" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 659.670865] env[68217]: DEBUG nova.network.neutron [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 659.912140] env[68217]: DEBUG nova.scheduler.client.report [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 659.917586] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 659.937189] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 660.020451] env[68217]: DEBUG nova.compute.manager [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 660.020800] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 660.021783] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-822a6237-87b6-4cc8-92d5-11e2e6c3ec7f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.030977] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 660.031253] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9ba41319-4b4a-4d7b-8826-e8ecafe3abea {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.070978] env[68217]: INFO nova.compute.manager [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] instance snapshotting [ 660.074026] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ca96cf-dc83-47df-a6bb-2156ad2a7ed9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.097293] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8c37336-c810-48b2-aa19-dbc084682710 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.110301] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 660.110301] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 660.110301] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Deleting the 
datastore file [datastore2] fcddfd72-a130-4efc-82cb-1fb22d33d684 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 660.110594] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e41a075d-0563-413d-b46d-7ea4515bfb89 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.119241] env[68217]: DEBUG oslo_vmware.api [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 660.119241] env[68217]: value = "task-2960798" [ 660.119241] env[68217]: _type = "Task" [ 660.119241] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.128014] env[68217]: DEBUG oslo_vmware.api [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960798, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.204721] env[68217]: DEBUG nova.network.neutron [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 660.342347] env[68217]: DEBUG nova.network.neutron [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Updating instance_info_cache with network_info: [{"id": "67b65a99-8a30-4343-a909-d1c4b9b602fc", "address": "fa:16:3e:11:5d:1e", "network": {"id": "529dcffc-e97a-4402-b873-d85aa865c2cc", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-137141357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a4a74a76c934c8db7890b352f35c31a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb18870e-f482-4c7b-8cd4-5c933d3ad294", "external-id": "nsx-vlan-transportzone-76", "segmentation_id": 76, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67b65a99-8a", "ovs_interfaceid": "67b65a99-8a30-4343-a909-d1c4b9b602fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 660.416961] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.573s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.417748] env[68217]: DEBUG nova.compute.manager [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 660.421110] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 30.079s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.421368] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 660.421950] env[68217]: INFO nova.compute.manager [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] [instance: 7056fb29-2a2f-4275-a411-4d5f3fcb421f] Successfully reverted task state from None on failure for instance. [ 660.424855] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.277s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 660.425217] env[68217]: DEBUG nova.objects.instance [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lazy-loading 'resources' on Instance uuid 93b49e91-5e9a-4b11-a833-31ab0883e0e8 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 660.434661] env[68217]: ERROR oslo_messaging.rpc.server [None req-0ab56ed2-fc1b-46ab-ad6f-cc1020842b57 tempest-ServerDiagnosticsNegativeTest-233162294 tempest-ServerDiagnosticsNegativeTest-233162294-project-member] Exception during message handling: nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 (generation 46): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ad98cc7c-6dad-46e5-b691-789e1460f61b"}]} [ 660.434661] env[68217]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 660.434661] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 174, in _process_incoming [ 660.434661] env[68217]: ERROR oslo_messaging.rpc.server res = 
self.dispatcher.dispatch(message) [ 660.434661] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 660.434661] env[68217]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 660.434661] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 660.434661] env[68217]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 660.434661] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 660.434661] env[68217]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 660.434661] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 660.434661] env[68217]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 660.435092] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 660.435092] env[68217]: ERROR oslo_messaging.rpc.server raise self.value [ 660.435092] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 660.435092] env[68217]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 660.435092] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 168, in decorated_function [ 660.435092] env[68217]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 660.435092] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 660.435092] env[68217]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 660.435092] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 660.435092] env[68217]: ERROR oslo_messaging.rpc.server raise self.value [ 660.435092] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 159, in decorated_function [ 660.435092] env[68217]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 660.435092] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1483, in decorated_function [ 660.435092] env[68217]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 660.435092] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 215, in decorated_function [ 660.435092] env[68217]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 660.435092] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 660.435092] env[68217]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 660.435505] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 660.435505] env[68217]: ERROR oslo_messaging.rpc.server raise self.value [ 660.435505] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", 
line 205, in decorated_function [ 660.435505] env[68217]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 660.435505] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3388, in terminate_instance [ 660.435505] env[68217]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 660.435505] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 660.435505] env[68217]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 660.435505] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3383, in do_terminate_instance [ 660.435505] env[68217]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 660.435505] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 660.435505] env[68217]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 660.435505] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 660.435505] env[68217]: ERROR oslo_messaging.rpc.server raise self.value [ 660.435505] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3376, in do_terminate_instance [ 660.435505] env[68217]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 660.435505] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3340, in _delete_instance [ 660.435505] env[68217]: ERROR oslo_messaging.rpc.server self._complete_deletion(context, instance) [ 660.435955] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 930, in _complete_deletion [ 660.435955] env[68217]: ERROR oslo_messaging.rpc.server self._update_resource_tracker(context, instance) [ 660.435955] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 696, in _update_resource_tracker [ 660.435955] env[68217]: ERROR oslo_messaging.rpc.server self.rt.update_usage(context, instance, instance.node) [ 660.435955] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 415, in inner [ 660.435955] env[68217]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 660.435955] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 702, in update_usage [ 660.435955] env[68217]: ERROR oslo_messaging.rpc.server self._update(context.elevated(), self.compute_nodes[nodename]) [ 660.435955] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1375, in _update [ 660.435955] env[68217]: ERROR oslo_messaging.rpc.server self._update_to_placement(context, compute_node, startup) [ 660.435955] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 56, in wrapped_f [ 660.435955] env[68217]: ERROR oslo_messaging.rpc.server return Retrying(*dargs, **dkw).call(f, *args, **kw) [ 660.435955] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 266, in call [ 660.435955] env[68217]: ERROR 
oslo_messaging.rpc.server raise attempt.get() [ 660.435955] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 301, in get [ 660.435955] env[68217]: ERROR oslo_messaging.rpc.server six.reraise(self.value[0], self.value[1], self.value[2]) [ 660.435955] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/six.py", line 724, in reraise [ 660.435955] env[68217]: ERROR oslo_messaging.rpc.server raise value [ 660.436571] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/retrying.py", line 251, in call [ 660.436571] env[68217]: ERROR oslo_messaging.rpc.server attempt = Attempt(fn(*args, **kwargs), attempt_number, False) [ 660.436571] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/resource_tracker.py", line 1360, in _update_to_placement [ 660.436571] env[68217]: ERROR oslo_messaging.rpc.server self.reportclient.update_from_provider_tree( [ 660.436571] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1502, in update_from_provider_tree [ 660.436571] env[68217]: ERROR oslo_messaging.rpc.server self.set_inventory_for_provider( [ 660.436571] env[68217]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/scheduler/client/report.py", line 1007, in set_inventory_for_provider [ 660.436571] env[68217]: ERROR oslo_messaging.rpc.server raise exception.ResourceProviderUpdateConflict( [ 660.436571] env[68217]: ERROR oslo_messaging.rpc.server nova.exception.ResourceProviderUpdateConflict: A conflict was encountered attempting to update resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 (generation 46): {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ad98cc7c-6dad-46e5-b691-789e1460f61b"}]} [ 660.436571] env[68217]: ERROR oslo_messaging.rpc.server [ 660.612371] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 660.612637] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-af7c5567-e85d-4435-96bf-f1bfd4124d8c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.621847] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the task: (returnval){ [ 660.621847] env[68217]: value = "task-2960799" [ 660.621847] env[68217]: _type = "Task" [ 660.621847] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.634568] env[68217]: DEBUG oslo_vmware.api [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960798, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.637735] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 660.637958] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 660.638152] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 660.638379] env[68217]: INFO nova.compute.manager [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Took 0.62 seconds to destroy the instance on the hypervisor. [ 660.638637] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 660.638906] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960799, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.639221] env[68217]: DEBUG nova.compute.manager [-] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 660.639434] env[68217]: DEBUG nova.network.neutron [-] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 660.847670] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Releasing lock "refresh_cache-8fcccac2-dae1-4af0-a2b2-787e1bb7c9be" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 660.847670] env[68217]: DEBUG nova.compute.manager [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Instance network_info: |[{"id": "67b65a99-8a30-4343-a909-d1c4b9b602fc", "address": "fa:16:3e:11:5d:1e", "network": {"id": "529dcffc-e97a-4402-b873-d85aa865c2cc", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-137141357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a4a74a76c934c8db7890b352f35c31a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb18870e-f482-4c7b-8cd4-5c933d3ad294", "external-id": "nsx-vlan-transportzone-76", "segmentation_id": 76, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67b65a99-8a", "ovs_interfaceid": "67b65a99-8a30-4343-a909-d1c4b9b602fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 660.847872] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:5d:1e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb18870e-f482-4c7b-8cd4-5c933d3ad294', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67b65a99-8a30-4343-a909-d1c4b9b602fc', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 660.853448] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Creating folder: Project (4a4a74a76c934c8db7890b352f35c31a). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 660.853742] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4a2a943b-6c12-4d93-9105-27fc9a1a0278 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.869886] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Created folder: Project (4a4a74a76c934c8db7890b352f35c31a) in parent group-v594094. [ 660.870083] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Creating folder: Instances. Parent ref: group-v594165. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 660.870329] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9cc8dbad-1307-453f-b44a-3791ed5ce36e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.882439] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Created folder: Instances in parent group-v594165. [ 660.882728] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 660.882935] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 660.883162] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9509e4c-fa1b-4055-93f8-48e5b4eef31b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.904444] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 660.904444] env[68217]: value = "task-2960802" [ 660.904444] env[68217]: _type = "Task" [ 660.904444] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.914607] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960802, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.930781] env[68217]: DEBUG nova.compute.utils [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 660.933094] env[68217]: DEBUG nova.compute.manager [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Not allocating networking since 'none' was specified. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 661.077579] env[68217]: INFO nova.compute.manager [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Rebuilding instance [ 661.135274] env[68217]: DEBUG nova.compute.manager [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 661.136392] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27971619-b3c1-49b9-a47c-afa7662997f6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.147911] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960799, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.185626] env[68217]: DEBUG nova.compute.manager [req-990c1ac7-cc28-4323-aa03-c8bf66fdcf54 req-66351786-5406-44ea-9b94-793802888636 service nova] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Received event network-changed-67b65a99-8a30-4343-a909-d1c4b9b602fc {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 661.185871] env[68217]: DEBUG nova.compute.manager [req-990c1ac7-cc28-4323-aa03-c8bf66fdcf54 req-66351786-5406-44ea-9b94-793802888636 service nova] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Refreshing instance network info cache due to event network-changed-67b65a99-8a30-4343-a909-d1c4b9b602fc. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 661.186085] env[68217]: DEBUG oslo_concurrency.lockutils [req-990c1ac7-cc28-4323-aa03-c8bf66fdcf54 req-66351786-5406-44ea-9b94-793802888636 service nova] Acquiring lock "refresh_cache-8fcccac2-dae1-4af0-a2b2-787e1bb7c9be" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.186247] env[68217]: DEBUG oslo_concurrency.lockutils [req-990c1ac7-cc28-4323-aa03-c8bf66fdcf54 req-66351786-5406-44ea-9b94-793802888636 service nova] Acquired lock "refresh_cache-8fcccac2-dae1-4af0-a2b2-787e1bb7c9be" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.186390] env[68217]: DEBUG nova.network.neutron [req-990c1ac7-cc28-4323-aa03-c8bf66fdcf54 req-66351786-5406-44ea-9b94-793802888636 service nova] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Refreshing network info cache for port 67b65a99-8a30-4343-a909-d1c4b9b602fc {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 661.417754] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960802, 'name': CreateVM_Task, 'duration_secs': 0.508357} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.418075] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 661.418718] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.418890] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.419237] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 661.419555] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7aa9fc3-6af5-4103-9c5f-123aa968b476 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.424887] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Waiting for the task: (returnval){ [ 661.424887] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b18d52-532c-b13c-0994-70358e3d8c5f" [ 661.424887] env[68217]: _type = "Task" [ 661.424887] 
env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.433780] env[68217]: DEBUG nova.compute.manager [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 661.436888] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b18d52-532c-b13c-0994-70358e3d8c5f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 661.527231] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69949343-dfbe-4477-aba2-8aabc2be012b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.535880] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcd8b06e-4ad1-4318-a992-11a0e6e5b7ba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.565735] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd7e97f3-d135-4442-949d-a3eb0cfc6c70 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.574300] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81208ff2-ce9b-40a9-bd23-38d09fe5ae63 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.589606] env[68217]: DEBUG nova.compute.provider_tree [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 661.613760] env[68217]: DEBUG nova.network.neutron [-] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.636893] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960799, 'name': CreateSnapshot_Task, 'duration_secs': 0.748717} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.636893] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 661.637630] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e69e44cc-f213-4ad8-a4f6-7d8df553e054 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.908269] env[68217]: DEBUG nova.network.neutron [req-990c1ac7-cc28-4323-aa03-c8bf66fdcf54 req-66351786-5406-44ea-9b94-793802888636 service nova] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Updated VIF entry in instance network info cache for port 67b65a99-8a30-4343-a909-d1c4b9b602fc. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 661.908634] env[68217]: DEBUG nova.network.neutron [req-990c1ac7-cc28-4323-aa03-c8bf66fdcf54 req-66351786-5406-44ea-9b94-793802888636 service nova] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Updating instance_info_cache with network_info: [{"id": "67b65a99-8a30-4343-a909-d1c4b9b602fc", "address": "fa:16:3e:11:5d:1e", "network": {"id": "529dcffc-e97a-4402-b873-d85aa865c2cc", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-137141357-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4a4a74a76c934c8db7890b352f35c31a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb18870e-f482-4c7b-8cd4-5c933d3ad294", "external-id": "nsx-vlan-transportzone-76", "segmentation_id": 76, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67b65a99-8a", "ovs_interfaceid": "67b65a99-8a30-4343-a909-d1c4b9b602fc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.936603] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b18d52-532c-b13c-0994-70358e3d8c5f, 'name': SearchDatastore_Task, 'duration_secs': 0.011787} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 661.936855] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 661.937330] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 661.937400] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.937553] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 661.938122] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 661.941440] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ec88244-7c8e-4947-8b96-2b737ecfb0a9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.951784] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 661.951784] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 661.952531] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e19c926-673b-4fad-965a-4e9ea4a6e9d0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.960130] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Waiting for the task: (returnval){ [ 661.960130] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52305ab7-4a65-acc0-cab1-cac955ecd407" [ 661.960130] env[68217]: _type = "Task" [ 661.960130] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 661.967971] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52305ab7-4a65-acc0-cab1-cac955ecd407, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.093364] env[68217]: DEBUG nova.scheduler.client.report [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 662.116035] env[68217]: INFO nova.compute.manager [-] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Took 1.48 seconds to deallocate network for instance. 
[ 662.157434] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 662.158707] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 662.158931] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-5ef9ab2b-3569-46bc-8eed-9aa31bebc642 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.162115] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d3cf5d81-4b0a-44dc-942f-2edc182b5aa2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.173494] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the task: (returnval){ [ 662.173494] env[68217]: value = "task-2960804" [ 662.173494] env[68217]: _type = "Task" [ 662.173494] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.178096] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Waiting for the task: (returnval){ [ 662.178096] env[68217]: value = "task-2960803" [ 662.178096] env[68217]: _type = "Task" [ 662.178096] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.186176] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960804, 'name': CloneVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.192146] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': task-2960803, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.412466] env[68217]: DEBUG oslo_concurrency.lockutils [req-990c1ac7-cc28-4323-aa03-c8bf66fdcf54 req-66351786-5406-44ea-9b94-793802888636 service nova] Releasing lock "refresh_cache-8fcccac2-dae1-4af0-a2b2-787e1bb7c9be" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 662.412746] env[68217]: DEBUG nova.compute.manager [req-990c1ac7-cc28-4323-aa03-c8bf66fdcf54 req-66351786-5406-44ea-9b94-793802888636 service nova] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Received event network-vif-deleted-e7076bf7-0c58-414f-ad86-6ae8c8b8cc96 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 662.412922] env[68217]: INFO nova.compute.manager [req-990c1ac7-cc28-4323-aa03-c8bf66fdcf54 req-66351786-5406-44ea-9b94-793802888636 service nova] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Neutron deleted interface e7076bf7-0c58-414f-ad86-6ae8c8b8cc96; detaching it from the instance and deleting it from the info cache [ 662.413104] env[68217]: DEBUG nova.network.neutron [req-990c1ac7-cc28-4323-aa03-c8bf66fdcf54 req-66351786-5406-44ea-9b94-793802888636 service nova] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.445480] env[68217]: DEBUG nova.compute.manager [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 662.474964] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52305ab7-4a65-acc0-cab1-cac955ecd407, 'name': SearchDatastore_Task, 'duration_secs': 0.013457} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.477449] env[68217]: DEBUG nova.virt.hardware [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 662.477728] env[68217]: DEBUG nova.virt.hardware [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.477897] env[68217]: DEBUG nova.virt.hardware [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 662.478138] env[68217]: DEBUG nova.virt.hardware [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.478310] env[68217]: DEBUG nova.virt.hardware [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 662.478484] env[68217]: DEBUG nova.virt.hardware [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 662.478680] env[68217]: DEBUG nova.virt.hardware [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 662.478838] env[68217]: DEBUG nova.virt.hardware [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 662.479111] env[68217]: DEBUG nova.virt.hardware [None 
req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 662.479202] env[68217]: DEBUG nova.virt.hardware [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 662.479382] env[68217]: DEBUG nova.virt.hardware [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 662.480783] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139e48ef-d32d-4523-911b-89c4e8a66911 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.483636] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc170d8a-85c9-4a5b-926f-c40fd725794a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.490142] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Waiting for the task: (returnval){ [ 662.490142] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5297f52f-cd51-6b87-1bf8-77b903b9b906" [ 662.490142] env[68217]: _type = "Task" [ 662.490142] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.497900] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f1d51e0-7ba5-4cad-9a0d-fb7b9d9e4fea {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.514127] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Instance VIF info [] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 662.519760] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Creating folder: Project (b0c5cf0b47e64e04b76809b4c0cd7ce2). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 662.522651] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6afa5c18-daff-4606-8fc3-d33ff1a4aa40 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.524751] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5297f52f-cd51-6b87-1bf8-77b903b9b906, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.534743] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Created folder: Project (b0c5cf0b47e64e04b76809b4c0cd7ce2) in parent group-v594094. [ 662.534889] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Creating folder: Instances. Parent ref: group-v594170. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 662.535141] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-df9391c4-d2d0-4d75-aaa0-021180ad424c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.546693] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Created folder: Instances in parent group-v594170. [ 662.546948] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 662.547165] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 662.547376] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6c65ff49-ec38-4bc8-a370-0b8530e73bbd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.566033] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 662.566033] env[68217]: value = "task-2960807" [ 662.566033] env[68217]: _type = "Task" [ 662.566033] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.574029] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960807, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.598175] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.173s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 662.600781] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.770s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 662.602450] env[68217]: INFO nova.compute.claims [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.622748] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 662.625635] env[68217]: INFO nova.scheduler.client.report [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Deleted allocations for instance 93b49e91-5e9a-4b11-a833-31ab0883e0e8 [ 662.689240] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960804, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.691997] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': task-2960803, 'name': PowerOffVM_Task, 'duration_secs': 0.156092} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 662.692318] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 662.692585] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 662.693637] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c5296d5-03ca-4790-b318-5f266e03b825 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.704039] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 662.704211] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-13add97c-724b-45a7-855c-9aafea5e32b7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.739802] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 662.740424] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 662.740474] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Deleting the datastore file [datastore2] 17bea068-7d7a-4a87-8b27-91a7efcd45c5 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 662.740882] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f91cf33-1aa3-403a-b26d-ddd34c280a16 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.751251] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Waiting for the task: (returnval){ [ 662.751251] env[68217]: value = "task-2960809" [ 662.751251] env[68217]: _type = "Task" [ 662.751251] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 662.761589] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': task-2960809, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 662.917032] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd32c53a-e1dc-41a9-884a-d63d82b6ab89 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.927284] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b2cb361-d559-4a23-a58b-2ba6f71c850f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.963055] env[68217]: DEBUG nova.compute.manager [req-990c1ac7-cc28-4323-aa03-c8bf66fdcf54 req-66351786-5406-44ea-9b94-793802888636 service nova] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Detach interface failed, port_id=e7076bf7-0c58-414f-ad86-6ae8c8b8cc96, reason: Instance fcddfd72-a130-4efc-82cb-1fb22d33d684 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 663.003315] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5297f52f-cd51-6b87-1bf8-77b903b9b906, 'name': SearchDatastore_Task, 'duration_secs': 0.021028} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.003570] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.003830] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be/8fcccac2-dae1-4af0-a2b2-787e1bb7c9be.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 663.004093] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ccd03912-3f00-4198-93b2-de804ec978b5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.011074] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Waiting for the task: (returnval){ [ 663.011074] env[68217]: value = "task-2960810" [ 663.011074] env[68217]: _type = "Task" [ 663.011074] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.018906] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': task-2960810, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.077620] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960807, 'name': CreateVM_Task, 'duration_secs': 0.284994} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.077620] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 663.077620] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.077789] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.077976] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 663.078237] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6fa378d1-8ca9-49b2-9f3d-e8c09c00bf01 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.083593] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Waiting for the task: (returnval){ [ 663.083593] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ab9a48-a9a8-037c-c65b-3bf645114b6a" [ 663.083593] env[68217]: _type = "Task" [ 663.083593] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.092955] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ab9a48-a9a8-037c-c65b-3bf645114b6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.137112] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b5921f2a-a787-40ca-9cee-3fba07b650ec tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "93b49e91-5e9a-4b11-a833-31ab0883e0e8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.259s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.186070] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960804, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.261130] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': task-2960809, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.226576} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.261359] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 663.261539] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 663.261733] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 663.523731] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': task-2960810, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.558113] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "00d2302b-84d4-42d8-94c7-caf45b925ddf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.558466] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "00d2302b-84d4-42d8-94c7-caf45b925ddf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.558581] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "00d2302b-84d4-42d8-94c7-caf45b925ddf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 663.558807] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "00d2302b-84d4-42d8-94c7-caf45b925ddf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 663.559070] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "00d2302b-84d4-42d8-94c7-caf45b925ddf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 663.561743] env[68217]: INFO nova.compute.manager [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Terminating instance [ 663.600598] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ab9a48-a9a8-037c-c65b-3bf645114b6a, 'name': SearchDatastore_Task, 'duration_secs': 0.010797} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 663.600598] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 663.600813] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 663.601020] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 663.601173] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 663.601365] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 663.601646] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3b181d4-ded4-487a-9023-cbe4c55ff623 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.615646] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 663.615874] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 663.616672] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d10282a0-badd-4710-a72d-f3f288d4a030 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.625096] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Waiting for the task: (returnval){ [ 663.625096] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]526265ff-1b16-e7b0-ace6-f00f23cbfb4e" [ 663.625096] env[68217]: _type = "Task" [ 663.625096] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.638143] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526265ff-1b16-e7b0-ace6-f00f23cbfb4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 663.691178] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960804, 'name': CloneVM_Task} progress is 95%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.024250] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': task-2960810, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.698099} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.024509] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be/8fcccac2-dae1-4af0-a2b2-787e1bb7c9be.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 664.024723] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 664.025010] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6d8b3fdd-7913-49f9-a55c-851ab62f913c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.033447] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Waiting for the task: (returnval){ [ 664.033447] env[68217]: value = "task-2960811" [ 664.033447] env[68217]: _type = "Task" [ 664.033447] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.042450] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': task-2960811, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.066239] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "refresh_cache-00d2302b-84d4-42d8-94c7-caf45b925ddf" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 664.066480] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquired lock "refresh_cache-00d2302b-84d4-42d8-94c7-caf45b925ddf" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 664.066722] env[68217]: DEBUG nova.network.neutron [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 664.081185] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61802be4-a719-4b21-8108-c4097868e4cf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.091244] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b72d44-425a-41f0-a561-eb9c248b379d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.122155] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f507df4f-95f2-4568-9c50-a81606faa53e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.132552] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc353337-4c0d-4a2c-9736-3a5219784c40 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.139464] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526265ff-1b16-e7b0-ace6-f00f23cbfb4e, 'name': SearchDatastore_Task, 'duration_secs': 0.022566} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.140975] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7562dbf1-09f3-4122-92ac-aaf2b0022cca {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.152782] env[68217]: DEBUG nova.compute.provider_tree [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 664.158695] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Waiting for the task: (returnval){ [ 664.158695] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c917e9-637d-c2ba-afbb-5f1076c97756" [ 664.158695] env[68217]: _type = "Task" [ 664.158695] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.167377] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c917e9-637d-c2ba-afbb-5f1076c97756, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.187970] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960804, 'name': CloneVM_Task, 'duration_secs': 1.991369} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.188285] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Created linked-clone VM from snapshot [ 664.189244] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb5ac58-4594-4000-967f-434eca28f203 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.196498] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Uploading image 5ee26def-ecea-46af-97ae-7fd4f68829bb {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 664.218252] env[68217]: DEBUG oslo_vmware.rw_handles [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 664.218252] env[68217]: value = "vm-594169" [ 664.218252] env[68217]: _type = "VirtualMachine" [ 664.218252] env[68217]: }. 
{{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 664.218441] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-26c36432-3d86-4644-ba46-c56d10310d00 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.228117] env[68217]: DEBUG oslo_vmware.rw_handles [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Lease: (returnval){ [ 664.228117] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522647ba-4084-644b-7c6e-3f57567d1b4e" [ 664.228117] env[68217]: _type = "HttpNfcLease" [ 664.228117] env[68217]: } obtained for exporting VM: (result){ [ 664.228117] env[68217]: value = "vm-594169" [ 664.228117] env[68217]: _type = "VirtualMachine" [ 664.228117] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 664.228389] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the lease: (returnval){ [ 664.228389] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522647ba-4084-644b-7c6e-3f57567d1b4e" [ 664.228389] env[68217]: _type = "HttpNfcLease" [ 664.228389] env[68217]: } to be ready. {{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 664.235627] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 664.235627] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522647ba-4084-644b-7c6e-3f57567d1b4e" [ 664.235627] env[68217]: _type = "HttpNfcLease" [ 664.235627] env[68217]: } is initializing. 
{{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 664.291711] env[68217]: DEBUG nova.virt.hardware [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 664.291977] env[68217]: DEBUG nova.virt.hardware [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 664.292149] env[68217]: DEBUG nova.virt.hardware [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 664.292331] env[68217]: DEBUG nova.virt.hardware [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 664.292476] env[68217]: DEBUG nova.virt.hardware [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 664.292619] env[68217]: DEBUG nova.virt.hardware [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 664.292825] env[68217]: DEBUG nova.virt.hardware [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 664.292982] env[68217]: DEBUG nova.virt.hardware [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 664.293161] env[68217]: DEBUG nova.virt.hardware [None req-4636f589-be62-4067-b7c7-015cb4942a08 
tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 664.293321] env[68217]: DEBUG nova.virt.hardware [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 664.293488] env[68217]: DEBUG nova.virt.hardware [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 664.294357] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48043adf-ec51-46f1-823b-f923c3199976 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.303018] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ea73f8-4be5-4a03-8bf4-e0fb01ce0af4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.316077] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Instance VIF info [] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 664.321688] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 664.321965] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 664.322196] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7f667d43-a286-44b2-a71a-70c7de21c3cb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.340143] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 664.340143] env[68217]: value = "task-2960813" [ 664.340143] env[68217]: _type = "Task" [ 664.340143] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.347929] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960813, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.543772] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': task-2960811, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072349} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.543772] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 664.544359] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b17b95-98fa-4a0f-861b-eeccefdb2573 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.567771] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Reconfiguring VM instance instance-0000001a to attach disk [datastore1] 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be/8fcccac2-dae1-4af0-a2b2-787e1bb7c9be.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 664.568154] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79414087-01c6-4bb9-8fb4-8ad750ec9d4a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.591404] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Waiting for the task: (returnval){ [ 664.591404] env[68217]: value = "task-2960814" [ 664.591404] env[68217]: _type = "Task" [ 664.591404] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.600555] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': task-2960814, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.601392] env[68217]: DEBUG nova.network.neutron [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 664.652081] env[68217]: DEBUG nova.network.neutron [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 664.656592] env[68217]: DEBUG nova.scheduler.client.report [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 664.671538] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c917e9-637d-c2ba-afbb-5f1076c97756, 'name': SearchDatastore_Task, 'duration_secs': 0.027329} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.671779] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 664.672053] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 6113feaf-5c21-49c3-9c19-ea10b60786d3/6113feaf-5c21-49c3-9c19-ea10b60786d3.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 664.672855] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-254a5e4d-bbb8-45b9-9b8a-454a53f57256 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.681129] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Waiting for the task: (returnval){ [ 664.681129] env[68217]: value = "task-2960815" [ 664.681129] env[68217]: _type = "Task" [ 664.681129] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.692099] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': task-2960815, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.737273] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 664.737273] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522647ba-4084-644b-7c6e-3f57567d1b4e" [ 664.737273] env[68217]: _type = "HttpNfcLease" [ 664.737273] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 664.737678] env[68217]: DEBUG oslo_vmware.rw_handles [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 664.737678] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522647ba-4084-644b-7c6e-3f57567d1b4e" [ 664.737678] env[68217]: _type = "HttpNfcLease" [ 664.737678] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 664.738525] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69680baa-cb2b-4777-b4b8-248f7fe0a069 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.747418] env[68217]: DEBUG oslo_vmware.rw_handles [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522020ea-e112-5643-2dea-0db9943668fb/disk-0.vmdk from lease info. {{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 664.747631] env[68217]: DEBUG oslo_vmware.rw_handles [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522020ea-e112-5643-2dea-0db9943668fb/disk-0.vmdk for reading. {{(pid=68217) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 664.852301] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960813, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.875349] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5d30945f-297b-441f-b971-26627e01a784 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.109650] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': task-2960814, 'name': ReconfigVM_Task, 'duration_secs': 0.315806} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.109999] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Reconfigured VM instance instance-0000001a to attach disk [datastore1] 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be/8fcccac2-dae1-4af0-a2b2-787e1bb7c9be.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 665.110838] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-063e5c70-652d-403e-ab37-64aed96fa482 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.121023] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Waiting for the task: (returnval){ [ 665.121023] env[68217]: value = "task-2960816" [ 665.121023] env[68217]: _type = "Task" [ 665.121023] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.131147] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': task-2960816, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.155612] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Releasing lock "refresh_cache-00d2302b-84d4-42d8-94c7-caf45b925ddf" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.155612] env[68217]: DEBUG nova.compute.manager [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 665.155846] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 665.156811] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-160342c3-d05e-46c1-a16f-1b18a9e09c67 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.166316] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.565s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 665.166903] env[68217]: DEBUG nova.compute.manager [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 665.170084] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 665.170885] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 33.168s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 665.172381] env[68217]: INFO nova.compute.claims [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 665.175072] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd379b50-9982-4d44-9dfd-a18ad25304c2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.188379] env[68217]: DEBUG oslo_vmware.api [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 665.188379] env[68217]: value = "task-2960817" [ 665.188379] env[68217]: _type = "Task" [ 665.188379] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.195817] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': task-2960815, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.202584] env[68217]: DEBUG oslo_vmware.api [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960817, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.354041] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960813, 'name': CreateVM_Task, 'duration_secs': 0.60079} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.354268] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 665.354754] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.354940] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.355341] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 665.355611] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54a1275b-2c30-4b6c-a53e-936134f11bbb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.362647] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Waiting for the task: (returnval){ [ 665.362647] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5214bfbc-ad93-6cff-39ea-0069cfb2d312" [ 665.362647] env[68217]: _type = "Task" [ 665.362647] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.372479] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5214bfbc-ad93-6cff-39ea-0069cfb2d312, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.631684] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': task-2960816, 'name': Rename_Task, 'duration_secs': 0.189014} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.632194] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 665.632273] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a653dc6-59e0-4337-b82b-9ef41c3d990d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.641971] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Waiting for the task: (returnval){ [ 665.641971] env[68217]: value = "task-2960818" [ 665.641971] env[68217]: _type = "Task" [ 665.641971] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.653633] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': task-2960818, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.672509] env[68217]: DEBUG nova.compute.utils [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 665.674120] env[68217]: DEBUG nova.compute.manager [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 665.674318] env[68217]: DEBUG nova.network.neutron [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 665.698637] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': task-2960815, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.727971} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.699722] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 6113feaf-5c21-49c3-9c19-ea10b60786d3/6113feaf-5c21-49c3-9c19-ea10b60786d3.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 665.700176] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 665.700839] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1bd6bde8-0968-4fa6-b7c3-66a8a4ce5a88 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.707511] env[68217]: DEBUG oslo_vmware.api [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960817, 'name': PowerOffVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.715519] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Waiting for the task: (returnval){ [ 665.715519] env[68217]: value = "task-2960819" [ 665.715519] env[68217]: _type = "Task" [ 665.715519] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.727240] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': task-2960819, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 665.736362] env[68217]: DEBUG nova.policy [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '79e67dcf0b4744de8141a9ebcb2eb899', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6642ee8a8d3496fa2d20fce61ac9b16', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 665.875359] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5214bfbc-ad93-6cff-39ea-0069cfb2d312, 'name': SearchDatastore_Task, 'duration_secs': 0.028738} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 665.875757] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 665.876147] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 665.876442] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 665.876604] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 665.876969] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 665.877303] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2563c9f3-7944-48c8-a7e7-340f7a251f4f {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.888320] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 665.888788] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 665.891284] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-948823ca-4513-404f-9aa5-de5f06969db1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.896217] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Waiting for the task: (returnval){ [ 665.896217] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52db2646-95be-e798-35d2-45ddc5be98e7" [ 665.896217] env[68217]: _type = "Task" [ 665.896217] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 665.906373] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52db2646-95be-e798-35d2-45ddc5be98e7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.049275] env[68217]: DEBUG nova.network.neutron [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Successfully created port: e6b77d68-0640-423e-8233-59c03f209bf7 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 666.153491] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': task-2960818, 'name': PowerOnVM_Task} progress is 90%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.178437] env[68217]: DEBUG nova.compute.manager [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 666.203187] env[68217]: DEBUG oslo_vmware.api [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960817, 'name': PowerOffVM_Task, 'duration_secs': 0.595043} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.203735] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 666.204968] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 666.205284] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f67901b-7982-4e27-adf9-1d5b214b2373 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.226800] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': task-2960819, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.11387} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.227081] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 666.228345] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3f41b8-45ac-4fbd-af59-c809d70af68d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.259932] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Reconfiguring VM instance instance-0000001b to attach disk [datastore2] 6113feaf-5c21-49c3-9c19-ea10b60786d3/6113feaf-5c21-49c3-9c19-ea10b60786d3.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 666.263422] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-57510edb-2159-41ef-98c5-9be34361104b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.279075] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 666.284024] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Deleting contents of the VM from datastore datastore2 {{(pid=68217) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 666.284024] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Deleting the datastore file [datastore2] 00d2302b-84d4-42d8-94c7-caf45b925ddf {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 666.284024] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f5d1c3a4-402c-4448-a582-900b2ea5bb63 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.298466] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Waiting for the task: (returnval){ [ 666.298466] env[68217]: value = "task-2960822" [ 666.298466] env[68217]: _type = "Task" [ 666.298466] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.299127] env[68217]: DEBUG oslo_vmware.api [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for the task: (returnval){ [ 666.299127] env[68217]: value = "task-2960821" [ 666.299127] env[68217]: _type = "Task" [ 666.299127] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.322301] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': task-2960822, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.329121] env[68217]: DEBUG oslo_vmware.api [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960821, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.415973] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52db2646-95be-e798-35d2-45ddc5be98e7, 'name': SearchDatastore_Task, 'duration_secs': 0.015125} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.416943] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2782b602-f396-4de8-bca2-81f7d38199f8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.425879] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Waiting for the task: (returnval){ [ 666.425879] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527baeca-d854-2390-4e00-453af6788007" [ 666.425879] env[68217]: _type = "Task" [ 666.425879] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.434425] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527baeca-d854-2390-4e00-453af6788007, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.653187] env[68217]: DEBUG oslo_vmware.api [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': task-2960818, 'name': PowerOnVM_Task, 'duration_secs': 0.665128} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.656223] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 666.656461] env[68217]: INFO nova.compute.manager [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Took 7.82 seconds to spawn the instance on the hypervisor. [ 666.656647] env[68217]: DEBUG nova.compute.manager [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 666.657799] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13942dd0-2052-41cf-94e4-0f60a23ca1c2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.816884] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': task-2960822, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 666.821306] env[68217]: DEBUG oslo_vmware.api [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Task: {'id': task-2960821, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.222751} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.822047] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1917da8-c8f6-4828-aedb-cec77b1e8af3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.824738] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 666.824935] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 666.825139] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 666.825317] env[68217]: INFO nova.compute.manager [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Took 1.67 seconds to destroy the instance on the hypervisor. [ 666.825602] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 666.825756] env[68217]: DEBUG nova.compute.manager [-] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 666.825921] env[68217]: DEBUG nova.network.neutron [-] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 666.832435] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fa3937d-4f12-4f03-ac3f-3dbd3fe2f645 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.864204] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1aebf6-3a2d-4089-b57f-792b9375b675 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.871992] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d2b41e4-df7b-4772-b6dd-93ba9eae3a34 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.881039] env[68217]: DEBUG nova.network.neutron [-] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 666.894751] env[68217]: DEBUG nova.compute.provider_tree [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 666.936579] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527baeca-d854-2390-4e00-453af6788007, 'name': SearchDatastore_Task, 'duration_secs': 0.017613} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 666.936864] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 666.937370] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 17bea068-7d7a-4a87-8b27-91a7efcd45c5/17bea068-7d7a-4a87-8b27-91a7efcd45c5.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 666.937947] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6fb27265-c199-43aa-adc9-f8fadc310572 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.945346] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Waiting for the task: (returnval){ [ 666.945346] env[68217]: value = "task-2960823" [ 666.945346] env[68217]: _type = "Task" [ 666.945346] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 666.954893] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': task-2960823, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.180189] env[68217]: INFO nova.compute.manager [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Took 50.20 seconds to build instance. [ 667.193729] env[68217]: DEBUG nova.compute.manager [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 667.220971] env[68217]: DEBUG nova.virt.hardware [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=<?>,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2025-03-12T08:12:55Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 667.221119] env[68217]: DEBUG nova.virt.hardware [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 667.221217] env[68217]: DEBUG nova.virt.hardware [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 667.221405] env[68217]: DEBUG nova.virt.hardware [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 667.221551] env[68217]: DEBUG nova.virt.hardware [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 667.221699] env[68217]: DEBUG nova.virt.hardware [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 667.221937] env[68217]: DEBUG nova.virt.hardware [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 667.222119] env[68217]: DEBUG nova.virt.hardware [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 667.222316] env[68217]: DEBUG 
nova.virt.hardware [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 667.222486] env[68217]: DEBUG nova.virt.hardware [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 667.222662] env[68217]: DEBUG nova.virt.hardware [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 667.223684] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff707b45-c5fb-4980-8674-eaa153f27b8d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.232309] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b509e40-f1e1-4f54-8cd5-bc401c4441f1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.313849] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': task-2960822, 'name': ReconfigVM_Task, 'duration_secs': 0.735696} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.313849] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Reconfigured VM instance instance-0000001b to attach disk [datastore2] 6113feaf-5c21-49c3-9c19-ea10b60786d3/6113feaf-5c21-49c3-9c19-ea10b60786d3.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 667.314567] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f21b15d-07db-4a19-b5f1-86b3021f6e71 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.322016] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Waiting for the task: (returnval){ [ 667.322016] env[68217]: value = "task-2960824" [ 667.322016] env[68217]: _type = "Task" [ 667.322016] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.331892] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': task-2960824, 'name': Rename_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.385798] env[68217]: DEBUG nova.network.neutron [-] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.397957] env[68217]: DEBUG nova.scheduler.client.report [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 667.456759] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': task-2960823, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.649943] env[68217]: DEBUG nova.network.neutron [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Successfully updated port: e6b77d68-0640-423e-8233-59c03f209bf7 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 667.658162] env[68217]: DEBUG nova.compute.manager [req-6b76107f-681f-46f7-b935-75fc21f4b34c req-cbcb8883-0b93-46bd-9a4b-d1a04ccaa19d service nova] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Received event network-vif-plugged-e6b77d68-0640-423e-8233-59c03f209bf7 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 667.658505] env[68217]: DEBUG oslo_concurrency.lockutils [req-6b76107f-681f-46f7-b935-75fc21f4b34c req-cbcb8883-0b93-46bd-9a4b-d1a04ccaa19d service nova] Acquiring lock "366c780a-2870-4e6e-8cfe-7eec10c363d5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.658824] env[68217]: DEBUG oslo_concurrency.lockutils [req-6b76107f-681f-46f7-b935-75fc21f4b34c req-cbcb8883-0b93-46bd-9a4b-d1a04ccaa19d service nova] Lock "366c780a-2870-4e6e-8cfe-7eec10c363d5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.659104] env[68217]: DEBUG oslo_concurrency.lockutils [req-6b76107f-681f-46f7-b935-75fc21f4b34c req-cbcb8883-0b93-46bd-9a4b-d1a04ccaa19d service nova] Lock "366c780a-2870-4e6e-8cfe-7eec10c363d5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.659371] env[68217]: DEBUG nova.compute.manager [req-6b76107f-681f-46f7-b935-75fc21f4b34c 
req-cbcb8883-0b93-46bd-9a4b-d1a04ccaa19d service nova] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] No waiting events found dispatching network-vif-plugged-e6b77d68-0640-423e-8233-59c03f209bf7 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 667.659725] env[68217]: WARNING nova.compute.manager [req-6b76107f-681f-46f7-b935-75fc21f4b34c req-cbcb8883-0b93-46bd-9a4b-d1a04ccaa19d service nova] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Received unexpected event network-vif-plugged-e6b77d68-0640-423e-8233-59c03f209bf7 for instance with vm_state building and task_state spawning. [ 667.682166] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edc1b5e7-8508-4d87-811b-983b985b8b04 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Lock "8fcccac2-dae1-4af0-a2b2-787e1bb7c9be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 70.998s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.835584] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': task-2960824, 'name': Rename_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 667.849338] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Acquiring lock "8fcccac2-dae1-4af0-a2b2-787e1bb7c9be" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.849686] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Lock "8fcccac2-dae1-4af0-a2b2-787e1bb7c9be" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.849934] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Acquiring lock "8fcccac2-dae1-4af0-a2b2-787e1bb7c9be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 667.850166] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Lock "8fcccac2-dae1-4af0-a2b2-787e1bb7c9be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.850364] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 
tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Lock "8fcccac2-dae1-4af0-a2b2-787e1bb7c9be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.853156] env[68217]: INFO nova.compute.manager [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Terminating instance [ 667.888433] env[68217]: INFO nova.compute.manager [-] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Took 1.06 seconds to deallocate network for instance. [ 667.903591] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.733s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 667.904116] env[68217]: DEBUG nova.compute.manager [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 667.907100] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.461s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 667.907323] env[68217]: DEBUG nova.objects.instance [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Lazy-loading 'resources' on Instance uuid 678acc61-1c94-4152-b4e8-7569ab169ab9 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 667.957164] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': task-2960823, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.87724} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 667.957504] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 17bea068-7d7a-4a87-8b27-91a7efcd45c5/17bea068-7d7a-4a87-8b27-91a7efcd45c5.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 667.957721] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 667.957969] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4617ce97-3e23-4549-8532-9f0cdae55ea1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.966855] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Waiting for the task: (returnval){ [ 667.966855] env[68217]: value = "task-2960825" [ 667.966855] env[68217]: _type = "Task" [ 667.966855] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 667.975994] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': task-2960825, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.157033] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Acquiring lock "refresh_cache-366c780a-2870-4e6e-8cfe-7eec10c363d5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.157033] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Acquired lock "refresh_cache-366c780a-2870-4e6e-8cfe-7eec10c363d5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 668.157033] env[68217]: DEBUG nova.network.neutron [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 668.184751] env[68217]: DEBUG nova.compute.manager [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 668.333460] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': task-2960824, 'name': Rename_Task, 'duration_secs': 0.869372} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.333774] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 668.334022] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e6b238da-deeb-478a-8128-286cc0ffe88d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.341504] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Waiting for the task: (returnval){ [ 668.341504] env[68217]: value = "task-2960826" [ 668.341504] env[68217]: _type = "Task" [ 668.341504] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.349778] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': task-2960826, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.358562] env[68217]: DEBUG nova.compute.manager [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 668.358770] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 668.359634] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eeaae7c-1fcd-4764-971c-fbbaa7818816 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.366758] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 668.367024] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e451c304-2df6-467d-80c2-f366e90c5723 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.373767] env[68217]: DEBUG oslo_vmware.api [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Waiting for the task: (returnval){ [ 668.373767] env[68217]: value = "task-2960827" [ 668.373767] env[68217]: _type = "Task" [ 668.373767] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.384218] env[68217]: DEBUG oslo_vmware.api [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': task-2960827, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.395465] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.409096] env[68217]: DEBUG nova.compute.utils [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 668.410754] env[68217]: DEBUG nova.compute.manager [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 668.410936] env[68217]: DEBUG nova.network.neutron [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 668.476847] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': task-2960825, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.077606} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.478318] env[68217]: DEBUG nova.policy [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0800d95dafdb4958acf5de250b287cb1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '74b280370d334d128f9ad30ed7bc2a9b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 668.483354] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 668.484491] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b7fd9ca-faa2-40a7-ad7e-aee88ad038fd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.509270] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Reconfiguring VM instance instance-00000017 to attach disk [datastore2] 17bea068-7d7a-4a87-8b27-91a7efcd45c5/17bea068-7d7a-4a87-8b27-91a7efcd45c5.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 668.512050] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b2fe7c2-f504-4b7e-a452-989362b03b6b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.534505] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Waiting for the task: (returnval){ [ 668.534505] env[68217]: value = "task-2960828" [ 668.534505] env[68217]: _type = "Task" [ 668.534505] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.547566] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': task-2960828, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.709735] env[68217]: DEBUG oslo_concurrency.lockutils [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 668.713539] env[68217]: DEBUG nova.network.neutron [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 668.818164] env[68217]: DEBUG nova.network.neutron [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Successfully created port: dbb82d0e-d13c-49a1-8f43-aabdcee3022d {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 668.852836] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': task-2960826, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 668.883357] env[68217]: DEBUG oslo_vmware.api [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': task-2960827, 'name': PowerOffVM_Task, 'duration_secs': 0.282256} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 668.883652] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 668.883977] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 668.884492] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-22c3710b-7ce4-40a6-bca8-a50935b1f923 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.890223] env[68217]: DEBUG nova.network.neutron [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Updating instance_info_cache with network_info: [{"id": "e6b77d68-0640-423e-8233-59c03f209bf7", "address": "fa:16:3e:b5:01:b4", "network": {"id": "ea6716bf-890a-49ca-9f10-68be76cca86f", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1676849196-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6642ee8a8d3496fa2d20fce61ac9b16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6b77d68-06", "ovs_interfaceid": "e6b77d68-0640-423e-8233-59c03f209bf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.915910] env[68217]: DEBUG nova.compute.manager [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 668.970380] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 668.970380] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 668.970380] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Deleting the datastore file [datastore1] 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 668.970622] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-748e095d-5753-432b-a032-c2ffd22712e4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.978058] env[68217]: DEBUG oslo_vmware.api [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Waiting for the task: (returnval){ [ 668.978058] env[68217]: value = "task-2960830" [ 668.978058] env[68217]: _type = "Task" [ 668.978058] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 668.987250] env[68217]: DEBUG oslo_vmware.api [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': task-2960830, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.034248] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e97e531-e6b8-4dff-a791-927052b049b1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.049147] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b9740d4-ae4f-4243-bdd6-7664e3b56701 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.052994] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': task-2960828, 'name': ReconfigVM_Task, 'duration_secs': 0.450915} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.053331] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Reconfigured VM instance instance-00000017 to attach disk [datastore2] 17bea068-7d7a-4a87-8b27-91a7efcd45c5/17bea068-7d7a-4a87-8b27-91a7efcd45c5.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 669.054451] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f4b3d232-fa4f-4f7b-b93a-b1213d82b5fb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.083828] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad169a9-bfa8-4d9d-be62-c82a4a009259 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.086568] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Waiting for the task: (returnval){ [ 669.086568] env[68217]: value = "task-2960831" [ 669.086568] env[68217]: _type = "Task" [ 669.086568] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.093447] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19918b35-9c93-48ba-8b43-92dc902b2538 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.101730] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': task-2960831, 'name': Rename_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.111282] env[68217]: DEBUG nova.compute.provider_tree [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.353383] env[68217]: DEBUG oslo_vmware.api [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': task-2960826, 'name': PowerOnVM_Task, 'duration_secs': 0.708657} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.353751] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 669.354049] env[68217]: INFO nova.compute.manager [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Took 6.91 seconds to spawn the instance on the hypervisor. [ 669.354325] env[68217]: DEBUG nova.compute.manager [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 669.355270] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a43db207-6efc-40f4-9948-b970be82f928 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.394854] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Releasing lock "refresh_cache-366c780a-2870-4e6e-8cfe-7eec10c363d5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 669.394854] env[68217]: DEBUG nova.compute.manager [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Instance network_info: |[{"id": "e6b77d68-0640-423e-8233-59c03f209bf7", "address": "fa:16:3e:b5:01:b4", "network": {"id": "ea6716bf-890a-49ca-9f10-68be76cca86f", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1676849196-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6642ee8a8d3496fa2d20fce61ac9b16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6b77d68-06", "ovs_interfaceid": "e6b77d68-0640-423e-8233-59c03f209bf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 669.395115] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] 
Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b5:01:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c68b7663-4f0e-47f0-ac7f-40c6d952f7bb', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e6b77d68-0640-423e-8233-59c03f209bf7', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 669.402785] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Creating folder: Project (c6642ee8a8d3496fa2d20fce61ac9b16). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 669.403245] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e27ee2ac-db2d-4676-bc72-3c3107d74683 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.416869] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Created folder: Project (c6642ee8a8d3496fa2d20fce61ac9b16) in parent group-v594094. [ 669.417174] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Creating folder: Instances. Parent ref: group-v594174. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 669.417446] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5e20090-f8cf-4a48-8f3c-80cfcfef1a14 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.432214] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Created folder: Instances in parent group-v594174. [ 669.432591] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 669.432639] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 669.432899] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-891d72b2-46e2-4590-835f-aa246a56b061 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.453707] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 669.453707] env[68217]: value = "task-2960834" [ 669.453707] env[68217]: _type = "Task" [ 669.453707] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.462400] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960834, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.488755] env[68217]: DEBUG oslo_vmware.api [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Task: {'id': task-2960830, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.255637} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.489018] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 669.489217] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 669.489390] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 669.489603] env[68217]: INFO nova.compute.manager [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Took 1.13 seconds to destroy the instance on the hypervisor. [ 669.489857] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 669.490059] env[68217]: DEBUG nova.compute.manager [-] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 669.490157] env[68217]: DEBUG nova.network.neutron [-] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 669.598441] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': task-2960831, 'name': Rename_Task, 'duration_secs': 0.193562} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.598717] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 669.598965] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0949772c-8943-4966-88c8-0295ca64759e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.606670] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Waiting for the task: (returnval){ [ 669.606670] env[68217]: value = "task-2960835" [ 669.606670] env[68217]: _type = "Task" [ 669.606670] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.615668] env[68217]: DEBUG nova.scheduler.client.report [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 669.618895] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': task-2960835, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 669.732978] env[68217]: DEBUG nova.compute.manager [req-42ccc67c-917c-4cf5-a8b4-fdc2a8dcb834 req-99cda341-7d5b-4ceb-a00c-98ac10442e98 service nova] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Received event network-changed-e6b77d68-0640-423e-8233-59c03f209bf7 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 669.733258] env[68217]: DEBUG nova.compute.manager [req-42ccc67c-917c-4cf5-a8b4-fdc2a8dcb834 req-99cda341-7d5b-4ceb-a00c-98ac10442e98 service nova] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Refreshing instance network info cache due to event network-changed-e6b77d68-0640-423e-8233-59c03f209bf7. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 669.733472] env[68217]: DEBUG oslo_concurrency.lockutils [req-42ccc67c-917c-4cf5-a8b4-fdc2a8dcb834 req-99cda341-7d5b-4ceb-a00c-98ac10442e98 service nova] Acquiring lock "refresh_cache-366c780a-2870-4e6e-8cfe-7eec10c363d5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.733545] env[68217]: DEBUG oslo_concurrency.lockutils [req-42ccc67c-917c-4cf5-a8b4-fdc2a8dcb834 req-99cda341-7d5b-4ceb-a00c-98ac10442e98 service nova] Acquired lock "refresh_cache-366c780a-2870-4e6e-8cfe-7eec10c363d5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.733721] env[68217]: DEBUG nova.network.neutron [req-42ccc67c-917c-4cf5-a8b4-fdc2a8dcb834 req-99cda341-7d5b-4ceb-a00c-98ac10442e98 service nova] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Refreshing network info cache for port e6b77d68-0640-423e-8233-59c03f209bf7 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 669.874463] env[68217]: INFO nova.compute.manager [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Took 41.09 seconds to build instance. [ 669.926079] env[68217]: DEBUG nova.compute.manager [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 669.965993] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960834, 'name': CreateVM_Task, 'duration_secs': 0.490684} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 669.966232] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 669.966893] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 669.967069] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 669.967416] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 669.967636] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36129180-08a1-4347-a313-13a7a1b48ef4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.972266] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Waiting for the task: (returnval){ [ 669.972266] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5260a0bc-dc37-121b-5da0-ef23322368d7" [ 669.972266] env[68217]: _type = "Task" [ 669.972266] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 669.980521] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5260a0bc-dc37-121b-5da0-ef23322368d7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.029173] env[68217]: DEBUG nova.compute.manager [req-74081b69-423b-49dd-8ac7-5d23051c0f2f req-06ea5382-5e01-48af-9704-4523303cd4a4 service nova] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Received event network-vif-deleted-67b65a99-8a30-4343-a909-d1c4b9b602fc {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 670.029301] env[68217]: INFO nova.compute.manager [req-74081b69-423b-49dd-8ac7-5d23051c0f2f req-06ea5382-5e01-48af-9704-4523303cd4a4 service nova] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Neutron deleted interface 67b65a99-8a30-4343-a909-d1c4b9b602fc; detaching it from the instance and deleting it from the info cache [ 670.029676] env[68217]: DEBUG nova.network.neutron [req-74081b69-423b-49dd-8ac7-5d23051c0f2f req-06ea5382-5e01-48af-9704-4523303cd4a4 service nova] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.117011] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': task-2960835, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.120999] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.214s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.123250] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.218s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.123548] env[68217]: DEBUG nova.objects.instance [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lazy-loading 'resources' on Instance uuid aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 670.126728] env[68217]: DEBUG nova.virt.hardware [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 670.126964] env[68217]: DEBUG nova.virt.hardware [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 670.127221] env[68217]: DEBUG nova.virt.hardware [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 670.127422] env[68217]: DEBUG nova.virt.hardware [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 670.127570] env[68217]: DEBUG nova.virt.hardware [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 670.127725] env[68217]: DEBUG nova.virt.hardware [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 670.127932] env[68217]: DEBUG nova.virt.hardware [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 670.128114] env[68217]: DEBUG nova.virt.hardware [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 670.128374] env[68217]: DEBUG nova.virt.hardware [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 670.128450] env[68217]: DEBUG nova.virt.hardware [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:579}} [ 670.128599] env[68217]: DEBUG nova.virt.hardware [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 670.129466] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90862fa8-dbaf-4739-bdde-53a61d8eca30 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.145449] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de25bb34-7106-48f8-af6a-c717801f40c4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.151912] env[68217]: INFO nova.scheduler.client.report [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Deleted allocations for instance 678acc61-1c94-4152-b4e8-7569ab169ab9 [ 670.362288] env[68217]: DEBUG nova.network.neutron [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Successfully updated port: dbb82d0e-d13c-49a1-8f43-aabdcee3022d {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 670.372447] env[68217]: DEBUG nova.network.neutron [-] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.376347] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e2081f9a-5411-4636-a6f2-da6e1ec7234f tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Lock "6113feaf-5c21-49c3-9c19-ea10b60786d3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 69.547s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.494251] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5260a0bc-dc37-121b-5da0-ef23322368d7, 'name': SearchDatastore_Task, 'duration_secs': 0.029763} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.495349] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 670.495349] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 670.495349] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.495349] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.495477] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 670.495743] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef0d443b-eafe-4d70-aac6-f661561a4376 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.506289] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 670.506551] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 670.507307] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74af5133-572b-4d6e-8ead-b9d5edc3cf55 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.513106] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Waiting for the task: (returnval){ [ 670.513106] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522f13a1-dba0-6d10-900e-a419f0c847cc" [ 670.513106] env[68217]: _type = "Task" [ 670.513106] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 670.521872] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522f13a1-dba0-6d10-900e-a419f0c847cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 670.532218] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3f0d93c0-1054-4a2b-8a8b-a48261b0e81b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.541726] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81df9fad-3b72-4141-b5f4-468174f01d17 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.575505] env[68217]: DEBUG nova.compute.manager [req-74081b69-423b-49dd-8ac7-5d23051c0f2f req-06ea5382-5e01-48af-9704-4523303cd4a4 service nova] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Detach interface failed, port_id=67b65a99-8a30-4343-a909-d1c4b9b602fc, reason: Instance 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 670.607147] env[68217]: DEBUG nova.network.neutron [req-42ccc67c-917c-4cf5-a8b4-fdc2a8dcb834 req-99cda341-7d5b-4ceb-a00c-98ac10442e98 service nova] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Updated VIF entry in instance network info cache for port e6b77d68-0640-423e-8233-59c03f209bf7. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 670.607517] env[68217]: DEBUG nova.network.neutron [req-42ccc67c-917c-4cf5-a8b4-fdc2a8dcb834 req-99cda341-7d5b-4ceb-a00c-98ac10442e98 service nova] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Updating instance_info_cache with network_info: [{"id": "e6b77d68-0640-423e-8233-59c03f209bf7", "address": "fa:16:3e:b5:01:b4", "network": {"id": "ea6716bf-890a-49ca-9f10-68be76cca86f", "bridge": "br-int", "label": "tempest-ServerAddressesTestJSON-1676849196-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6642ee8a8d3496fa2d20fce61ac9b16", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c68b7663-4f0e-47f0-ac7f-40c6d952f7bb", "external-id": "nsx-vlan-transportzone-696", "segmentation_id": 696, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape6b77d68-06", "ovs_interfaceid": "e6b77d68-0640-423e-8233-59c03f209bf7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 670.617556] env[68217]: DEBUG oslo_vmware.api [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Task: {'id': task-2960835, 'name': PowerOnVM_Task, 'duration_secs': 0.673176} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 670.618154] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 670.618154] env[68217]: DEBUG nova.compute.manager [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 670.618981] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa32c26a-d747-430f-b278-2bbc7de5fab1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.645353] env[68217]: DEBUG nova.compute.manager [None req-0ba771f0-81eb-4495-8aeb-cae54b60e118 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 670.646252] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-064de886-3fa7-42b1-b9a5-6fce54fc1088 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.666112] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a4de4eb6-213b-47b3-a676-40992f3f2be4 tempest-ImagesNegativeTestJSON-1481145656 tempest-ImagesNegativeTestJSON-1481145656-project-member] Lock "678acc61-1c94-4152-b4e8-7569ab169ab9" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 38.718s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.713971] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0997c7a4-9755-4fe2-9f1a-f45feef2e417 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.721362] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-181b5bd8-2eef-4358-a25c-ccea4ce806af {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.753020] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7415442-3996-4a08-9a50-ae1c3f877caa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.760320] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301ee094-3a5a-4dcc-a895-663200adf338 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 670.765929] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Acquiring lock "6113feaf-5c21-49c3-9c19-ea10b60786d3" by 
"nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.766238] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Lock "6113feaf-5c21-49c3-9c19-ea10b60786d3" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.766507] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Acquiring lock "6113feaf-5c21-49c3-9c19-ea10b60786d3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 670.766715] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Lock "6113feaf-5c21-49c3-9c19-ea10b60786d3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 670.766882] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Lock "6113feaf-5c21-49c3-9c19-ea10b60786d3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 670.776307] env[68217]: DEBUG nova.compute.provider_tree [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 670.777823] env[68217]: INFO nova.compute.manager [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Terminating instance [ 670.865134] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquiring lock "refresh_cache-cf457d43-b939-4284-b84d-9075895e9dda" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 670.865311] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquired lock "refresh_cache-cf457d43-b939-4284-b84d-9075895e9dda" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 670.865471] env[68217]: DEBUG nova.network.neutron [None 
req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 670.876058] env[68217]: INFO nova.compute.manager [-] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Took 1.39 seconds to deallocate network for instance. [ 670.878022] env[68217]: DEBUG nova.compute.manager [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 671.023977] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522f13a1-dba0-6d10-900e-a419f0c847cc, 'name': SearchDatastore_Task, 'duration_secs': 0.012932} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.024777] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ab6c0157-d978-4cb8-beb1-291b32b868de {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.030061] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Waiting for the task: (returnval){ [ 671.030061] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]528fbd6d-6dd7-015f-841b-8953d3d580b8" [ 671.030061] env[68217]: _type = "Task" [ 671.030061] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.038351] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528fbd6d-6dd7-015f-841b-8953d3d580b8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.113682] env[68217]: DEBUG oslo_concurrency.lockutils [req-42ccc67c-917c-4cf5-a8b4-fdc2a8dcb834 req-99cda341-7d5b-4ceb-a00c-98ac10442e98 service nova] Releasing lock "refresh_cache-366c780a-2870-4e6e-8cfe-7eec10c363d5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.137671] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.158687] env[68217]: INFO nova.compute.manager [None req-0ba771f0-81eb-4495-8aeb-cae54b60e118 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] instance snapshotting [ 671.159341] env[68217]: DEBUG nova.objects.instance [None req-0ba771f0-81eb-4495-8aeb-cae54b60e118 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Lazy-loading 'flavor' on Instance uuid 6113feaf-5c21-49c3-9c19-ea10b60786d3 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 671.281855] env[68217]: DEBUG nova.scheduler.client.report [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 671.284772] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Acquiring lock "refresh_cache-6113feaf-5c21-49c3-9c19-ea10b60786d3" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 671.284933] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Acquired lock "refresh_cache-6113feaf-5c21-49c3-9c19-ea10b60786d3" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 671.285119] env[68217]: DEBUG nova.network.neutron [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 671.388031] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.406153] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.437158] env[68217]: DEBUG nova.network.neutron [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.543544] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528fbd6d-6dd7-015f-841b-8953d3d580b8, 'name': SearchDatastore_Task, 'duration_secs': 0.023938} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 671.544764] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 671.545093] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 366c780a-2870-4e6e-8cfe-7eec10c363d5/366c780a-2870-4e6e-8cfe-7eec10c363d5.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 671.545375] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b95df265-4b50-4525-8bc8-637fe960afd9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.552466] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Waiting for the task: (returnval){ [ 671.552466] env[68217]: value = "task-2960836" [ 671.552466] env[68217]: _type = "Task" [ 671.552466] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 671.561836] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960836, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 671.666526] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77bfc14c-c2a6-4661-8ba9-2e32cc633a9a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.685785] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeadca64-980e-4c4e-8322-ae75bf432384 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.787645] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.664s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.793423] env[68217]: DEBUG oslo_concurrency.lockutils [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 35.633s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.794642] env[68217]: DEBUG nova.objects.instance [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lazy-loading 'resources' on Instance uuid 14c8e8e6-5d7f-45b4-8a84-d5951c38573f {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 671.806025] env[68217]: DEBUG nova.network.neutron [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Updating instance_info_cache with network_info: [{"id": "dbb82d0e-d13c-49a1-8f43-aabdcee3022d", "address": "fa:16:3e:56:f1:68", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.75", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbb82d0e-d1", "ovs_interfaceid": "dbb82d0e-d13c-49a1-8f43-aabdcee3022d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.816503] env[68217]: DEBUG nova.network.neutron [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 
tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 671.834578] env[68217]: INFO nova.scheduler.client.report [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Deleted allocations for instance aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5 [ 671.899464] env[68217]: DEBUG nova.network.neutron [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 671.934233] env[68217]: DEBUG nova.compute.manager [req-f2a22db5-7d69-4581-a9ab-3fa3a7ceebdf req-82fd90d4-9460-4adb-957c-c7ef14d87942 service nova] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Received event network-vif-plugged-dbb82d0e-d13c-49a1-8f43-aabdcee3022d {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 671.934233] env[68217]: DEBUG oslo_concurrency.lockutils [req-f2a22db5-7d69-4581-a9ab-3fa3a7ceebdf req-82fd90d4-9460-4adb-957c-c7ef14d87942 service nova] Acquiring lock "cf457d43-b939-4284-b84d-9075895e9dda-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 671.934389] env[68217]: DEBUG oslo_concurrency.lockutils [req-f2a22db5-7d69-4581-a9ab-3fa3a7ceebdf req-82fd90d4-9460-4adb-957c-c7ef14d87942 service nova] Lock "cf457d43-b939-4284-b84d-9075895e9dda-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 671.935049] env[68217]: DEBUG oslo_concurrency.lockutils [req-f2a22db5-7d69-4581-a9ab-3fa3a7ceebdf req-82fd90d4-9460-4adb-957c-c7ef14d87942 service nova] Lock "cf457d43-b939-4284-b84d-9075895e9dda-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 671.935049] env[68217]: DEBUG nova.compute.manager [req-f2a22db5-7d69-4581-a9ab-3fa3a7ceebdf req-82fd90d4-9460-4adb-957c-c7ef14d87942 service nova] [instance: cf457d43-b939-4284-b84d-9075895e9dda] No waiting events found dispatching network-vif-plugged-dbb82d0e-d13c-49a1-8f43-aabdcee3022d {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 671.935049] env[68217]: WARNING nova.compute.manager [req-f2a22db5-7d69-4581-a9ab-3fa3a7ceebdf req-82fd90d4-9460-4adb-957c-c7ef14d87942 service nova] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Received unexpected event network-vif-plugged-dbb82d0e-d13c-49a1-8f43-aabdcee3022d for instance with vm_state building and task_state spawning. 
[ 671.935353] env[68217]: DEBUG nova.compute.manager [req-f2a22db5-7d69-4581-a9ab-3fa3a7ceebdf req-82fd90d4-9460-4adb-957c-c7ef14d87942 service nova] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Received event network-changed-dbb82d0e-d13c-49a1-8f43-aabdcee3022d {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 671.935353] env[68217]: DEBUG nova.compute.manager [req-f2a22db5-7d69-4581-a9ab-3fa3a7ceebdf req-82fd90d4-9460-4adb-957c-c7ef14d87942 service nova] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Refreshing instance network info cache due to event network-changed-dbb82d0e-d13c-49a1-8f43-aabdcee3022d. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 671.935441] env[68217]: DEBUG oslo_concurrency.lockutils [req-f2a22db5-7d69-4581-a9ab-3fa3a7ceebdf req-82fd90d4-9460-4adb-957c-c7ef14d87942 service nova] Acquiring lock "refresh_cache-cf457d43-b939-4284-b84d-9075895e9dda" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.062675] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960836, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.200705] env[68217]: DEBUG nova.compute.manager [None req-0ba771f0-81eb-4495-8aeb-cae54b60e118 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Instance disappeared during snapshot {{(pid=68217) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 672.305543] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Releasing lock "refresh_cache-cf457d43-b939-4284-b84d-9075895e9dda" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.305900] env[68217]: DEBUG nova.compute.manager [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Instance network_info: |[{"id": "dbb82d0e-d13c-49a1-8f43-aabdcee3022d", "address": "fa:16:3e:56:f1:68", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.75", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbb82d0e-d1", "ovs_interfaceid": "dbb82d0e-d13c-49a1-8f43-aabdcee3022d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 672.306206] env[68217]: DEBUG oslo_concurrency.lockutils [req-f2a22db5-7d69-4581-a9ab-3fa3a7ceebdf req-82fd90d4-9460-4adb-957c-c7ef14d87942 service nova] Acquired lock "refresh_cache-cf457d43-b939-4284-b84d-9075895e9dda" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 672.306376] env[68217]: DEBUG nova.network.neutron [req-f2a22db5-7d69-4581-a9ab-3fa3a7ceebdf req-82fd90d4-9460-4adb-957c-c7ef14d87942 service nova] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Refreshing network info cache for port dbb82d0e-d13c-49a1-8f43-aabdcee3022d {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 672.307924] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:56:f1:68', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dbb82d0e-d13c-49a1-8f43-aabdcee3022d', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 672.316234] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 672.316729] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 672.317868] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-caf26507-8453-4874-88d2-bb51c5e61ec3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.345063] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e029f72-9533-4bc8-9664-e65efad602cb tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.460s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.348026] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 672.348026] env[68217]: value = "task-2960837" [ 672.348026] env[68217]: _type = "Task" [ 672.348026] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.364390] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960837, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.384887] env[68217]: DEBUG nova.compute.manager [None req-0ba771f0-81eb-4495-8aeb-cae54b60e118 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Found 0 images (rotation: 2) {{(pid=68217) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 672.405521] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Releasing lock "refresh_cache-6113feaf-5c21-49c3-9c19-ea10b60786d3" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 672.405521] env[68217]: DEBUG nova.compute.manager [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 672.405521] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 672.405521] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d379c7b7-83e0-47c8-987f-be3da685d21f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.419021] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 672.419021] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52e36fe0-c39a-4fda-9f28-0340e04b204b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.428134] env[68217]: DEBUG oslo_vmware.api [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Waiting for the task: (returnval){ [ 672.428134] env[68217]: value = "task-2960838" [ 672.428134] env[68217]: _type = "Task" [ 672.428134] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.439724] env[68217]: DEBUG oslo_vmware.api [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': task-2960838, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.565022] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960836, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526529} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.565022] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 366c780a-2870-4e6e-8cfe-7eec10c363d5/366c780a-2870-4e6e-8cfe-7eec10c363d5.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 672.565022] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 672.565022] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3502e07c-cdd9-446f-ac8c-9ef1a3ece1be {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.578178] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Waiting for the task: (returnval){ [ 672.578178] env[68217]: value = "task-2960839" [ 672.578178] env[68217]: _type = "Task" [ 672.578178] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 672.591781] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960839, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.652729] env[68217]: DEBUG oslo_concurrency.lockutils [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Acquiring lock "17bea068-7d7a-4a87-8b27-91a7efcd45c5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.653114] env[68217]: DEBUG oslo_concurrency.lockutils [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Lock "17bea068-7d7a-4a87-8b27-91a7efcd45c5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.653398] env[68217]: DEBUG oslo_concurrency.lockutils [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Acquiring lock "17bea068-7d7a-4a87-8b27-91a7efcd45c5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 672.653595] env[68217]: DEBUG oslo_concurrency.lockutils [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Lock "17bea068-7d7a-4a87-8b27-91a7efcd45c5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 672.655123] env[68217]: DEBUG oslo_concurrency.lockutils [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Lock "17bea068-7d7a-4a87-8b27-91a7efcd45c5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 672.655959] env[68217]: INFO nova.compute.manager [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Terminating instance [ 672.864704] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960837, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 672.921619] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72e5b4da-2bdf-4b3d-bf8b-4685f1ed16c8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.934835] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-865bf623-6a9e-4e56-9eb0-ee3ea74df144 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.947669] env[68217]: DEBUG oslo_vmware.api [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': task-2960838, 'name': PowerOffVM_Task, 'duration_secs': 0.132644} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 672.977841] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 672.978065] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 672.981191] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6c90dae-415b-4184-a0ce-77488020fa30 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.983443] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5852d852-0969-4d09-9a90-171e571a169f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.995050] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d558d767-38b8-4ea3-b549-5731b53d690a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.009701] env[68217]: DEBUG nova.compute.provider_tree [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 673.012357] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 673.012698] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Deleting 
contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 673.012964] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Deleting the datastore file [datastore2] 6113feaf-5c21-49c3-9c19-ea10b60786d3 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 673.013310] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-23e287d1-5243-424f-99f3-de961d114c27 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.022519] env[68217]: DEBUG oslo_vmware.api [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Waiting for the task: (returnval){ [ 673.022519] env[68217]: value = "task-2960841" [ 673.022519] env[68217]: _type = "Task" [ 673.022519] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.032679] env[68217]: DEBUG oslo_vmware.api [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': task-2960841, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.088218] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960839, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069424} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.088686] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 673.089458] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70173023-5051-463d-9e62-6fedeb4b252d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.113124] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Reconfiguring VM instance instance-0000001c to attach disk [datastore2] 366c780a-2870-4e6e-8cfe-7eec10c363d5/366c780a-2870-4e6e-8cfe-7eec10c363d5.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 673.116054] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-89a662c6-8e10-4e8d-95bd-535a33165248 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.138201] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Waiting for the task: (returnval){ [ 673.138201] env[68217]: value = "task-2960842" [ 673.138201] env[68217]: _type = "Task" [ 673.138201] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.146971] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960842, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.161761] env[68217]: DEBUG oslo_concurrency.lockutils [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Acquiring lock "refresh_cache-17bea068-7d7a-4a87-8b27-91a7efcd45c5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.161934] env[68217]: DEBUG oslo_concurrency.lockutils [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Acquired lock "refresh_cache-17bea068-7d7a-4a87-8b27-91a7efcd45c5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.162139] env[68217]: DEBUG nova.network.neutron [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 673.261469] env[68217]: DEBUG nova.network.neutron [req-f2a22db5-7d69-4581-a9ab-3fa3a7ceebdf req-82fd90d4-9460-4adb-957c-c7ef14d87942 service nova] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Updated VIF entry in instance network info cache for port dbb82d0e-d13c-49a1-8f43-aabdcee3022d. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 673.261469] env[68217]: DEBUG nova.network.neutron [req-f2a22db5-7d69-4581-a9ab-3fa3a7ceebdf req-82fd90d4-9460-4adb-957c-c7ef14d87942 service nova] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Updating instance_info_cache with network_info: [{"id": "dbb82d0e-d13c-49a1-8f43-aabdcee3022d", "address": "fa:16:3e:56:f1:68", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.75", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdbb82d0e-d1", "ovs_interfaceid": "dbb82d0e-d13c-49a1-8f43-aabdcee3022d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.362850] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960837, 'name': CreateVM_Task, 'duration_secs': 0.641134} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.363350] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 673.364108] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.364321] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.364593] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 673.365188] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-02c38631-393f-4637-92b7-599d69046c21 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.370493] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for the task: (returnval){ [ 673.370493] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]521b2afb-05b3-fd0b-3cbb-a7ec10a9598b" [ 673.370493] env[68217]: _type = "Task" [ 673.370493] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.381283] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521b2afb-05b3-fd0b-3cbb-a7ec10a9598b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.519503] env[68217]: DEBUG nova.scheduler.client.report [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 673.534681] env[68217]: DEBUG oslo_vmware.api [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Task: {'id': task-2960841, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.218423} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.534681] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 673.534681] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 673.535103] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 673.535963] env[68217]: INFO nova.compute.manager [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Took 1.13 seconds to destroy the instance on the hypervisor. [ 673.535963] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 673.535963] env[68217]: DEBUG nova.compute.manager [-] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 673.536123] env[68217]: DEBUG nova.network.neutron [-] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 673.558466] env[68217]: DEBUG nova.network.neutron [-] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.648036] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960842, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 673.695474] env[68217]: DEBUG nova.network.neutron [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 673.764620] env[68217]: DEBUG nova.network.neutron [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.766061] env[68217]: DEBUG oslo_concurrency.lockutils [req-f2a22db5-7d69-4581-a9ab-3fa3a7ceebdf req-82fd90d4-9460-4adb-957c-c7ef14d87942 service nova] Releasing lock "refresh_cache-cf457d43-b939-4284-b84d-9075895e9dda" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.881412] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521b2afb-05b3-fd0b-3cbb-a7ec10a9598b, 'name': SearchDatastore_Task, 'duration_secs': 0.023935} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 673.881762] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 673.881985] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 673.882239] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.882384] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 673.882559] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 673.882824] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c10f87e4-6bab-48c5-8152-d6d02165dd5d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.891478] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 673.891661] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 673.892402] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a4f3d6e-9f07-407c-b45e-1e2a9d9a0d4a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.898397] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for the task: (returnval){ [ 673.898397] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e91ac8-e23f-6277-2ad9-413b937e94b3" [ 673.898397] env[68217]: _type = "Task" [ 673.898397] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 673.910274] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e91ac8-e23f-6277-2ad9-413b937e94b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.029408] env[68217]: DEBUG oslo_concurrency.lockutils [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.236s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.031925] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.439s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.033524] env[68217]: INFO nova.compute.claims [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 674.052776] env[68217]: INFO nova.scheduler.client.report [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Deleted allocations for instance 14c8e8e6-5d7f-45b4-8a84-d5951c38573f [ 674.061800] env[68217]: DEBUG nova.network.neutron [-] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 674.147132] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960842, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.268962] env[68217]: DEBUG oslo_concurrency.lockutils [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Releasing lock "refresh_cache-17bea068-7d7a-4a87-8b27-91a7efcd45c5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.268962] env[68217]: DEBUG nova.compute.manager [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 674.268962] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 674.269930] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2355a1d-d649-4fac-9a98-55a653670905 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.277812] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 674.278064] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7f8408a1-9ee8-47a4-a351-165301d2dba2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.283656] env[68217]: DEBUG oslo_vmware.api [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 674.283656] env[68217]: value = "task-2960843" [ 674.283656] env[68217]: _type = "Task" [ 674.283656] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.291507] env[68217]: DEBUG oslo_vmware.api [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960843, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.334249] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "63e0fc9e-5182-4781-b007-69e2134718df" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.334470] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "63e0fc9e-5182-4781-b007-69e2134718df" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.334679] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "63e0fc9e-5182-4781-b007-69e2134718df-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.334876] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "63e0fc9e-5182-4781-b007-69e2134718df-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.335070] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "63e0fc9e-5182-4781-b007-69e2134718df-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.337402] env[68217]: INFO nova.compute.manager [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Terminating instance [ 674.406145] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.406510] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s 
{{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.406737] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 674.406922] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 674.407142] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.409373] env[68217]: INFO nova.compute.manager [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Terminating instance [ 674.415918] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e91ac8-e23f-6277-2ad9-413b937e94b3, 'name': SearchDatastore_Task, 'duration_secs': 0.033686} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.416980] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7d5d8a45-5235-46ba-a481-0b0de4c83941 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.424268] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for the task: (returnval){ [ 674.424268] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c80497-d8e4-f9db-a057-1ebc269665d3" [ 674.424268] env[68217]: _type = "Task" [ 674.424268] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.435635] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c80497-d8e4-f9db-a057-1ebc269665d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.562433] env[68217]: DEBUG oslo_concurrency.lockutils [None req-888e4281-dc11-4ddb-9f16-57f74ccd90ed tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "14c8e8e6-5d7f-45b4-8a84-d5951c38573f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 41.865s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 674.564030] env[68217]: INFO nova.compute.manager [-] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Took 1.03 seconds to deallocate network for instance. [ 674.650020] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960842, 'name': ReconfigVM_Task, 'duration_secs': 1.442337} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.650020] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Reconfigured VM instance instance-0000001c to attach disk [datastore2] 366c780a-2870-4e6e-8cfe-7eec10c363d5/366c780a-2870-4e6e-8cfe-7eec10c363d5.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 674.650020] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-661b4715-739e-4400-895e-32a3a502d585 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.656337] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Waiting for the task: (returnval){ [ 674.656337] env[68217]: value = "task-2960844" [ 674.656337] env[68217]: _type = "Task" [ 674.656337] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.664786] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960844, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.795167] env[68217]: DEBUG oslo_vmware.api [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960843, 'name': PowerOffVM_Task, 'duration_secs': 0.173091} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.795942] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 674.796582] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 674.798168] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-634679a9-c22f-4ad3-975e-e121ada2a575 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.818606] env[68217]: DEBUG oslo_vmware.rw_handles [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522020ea-e112-5643-2dea-0db9943668fb/disk-0.vmdk. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 674.819593] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f28f48-a1de-4421-b291-d34818e34fc8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.823900] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 674.824221] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 674.824399] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Deleting the datastore file [datastore2] 17bea068-7d7a-4a87-8b27-91a7efcd45c5 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 674.825015] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b2eabef1-8d13-437a-9302-47da1b022809 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.828627] env[68217]: DEBUG oslo_vmware.rw_handles [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522020ea-e112-5643-2dea-0db9943668fb/disk-0.vmdk is in state: ready. 
{{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 674.828862] env[68217]: ERROR oslo_vmware.rw_handles [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522020ea-e112-5643-2dea-0db9943668fb/disk-0.vmdk due to incomplete transfer. [ 674.829111] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-1cdf4577-d31e-4b6b-a885-bf5617c72ad6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.833328] env[68217]: DEBUG oslo_vmware.api [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for the task: (returnval){ [ 674.833328] env[68217]: value = "task-2960846" [ 674.833328] env[68217]: _type = "Task" [ 674.833328] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.836819] env[68217]: DEBUG oslo_vmware.rw_handles [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/522020ea-e112-5643-2dea-0db9943668fb/disk-0.vmdk. {{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 674.837083] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Uploaded image 5ee26def-ecea-46af-97ae-7fd4f68829bb to the Glance image server {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 674.839877] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 674.840494] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2b968b71-022f-478a-9120-560764032dd6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.845508] env[68217]: DEBUG nova.compute.manager [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 674.845792] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 674.846539] env[68217]: DEBUG oslo_vmware.api [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960846, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.847364] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcba4e88-e9ab-4da0-8eed-20279b03726c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.854026] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 674.855254] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-60002448-01ba-46dd-b3df-2f861870a338 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.856729] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the task: (returnval){ [ 674.856729] env[68217]: value = "task-2960847" [ 674.856729] env[68217]: _type = "Task" [ 674.856729] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.862449] env[68217]: DEBUG oslo_vmware.api [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 674.862449] env[68217]: value = "task-2960848" [ 674.862449] env[68217]: _type = "Task" [ 674.862449] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.868670] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960847, 'name': Destroy_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.873088] env[68217]: DEBUG oslo_vmware.api [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960848, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.917423] env[68217]: DEBUG nova.compute.manager [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 674.917816] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 674.918919] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d0820f-e8d1-4a01-abaf-4647e97a5081 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.930948] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 674.931611] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7685126a-5260-43f5-a264-c3a60dd611dc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.937878] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c80497-d8e4-f9db-a057-1ebc269665d3, 'name': SearchDatastore_Task, 'duration_secs': 0.01604} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 674.938548] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 674.938859] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] cf457d43-b939-4284-b84d-9075895e9dda/cf457d43-b939-4284-b84d-9075895e9dda.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 674.939157] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a87d8b81-8e23-4e93-8afe-a511d61be4d8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.942377] env[68217]: DEBUG oslo_vmware.api [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 674.942377] env[68217]: value = "task-2960849" [ 674.942377] env[68217]: _type = "Task" [ 674.942377] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.948815] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for the task: (returnval){ [ 674.948815] env[68217]: value = "task-2960850" [ 674.948815] env[68217]: _type = "Task" [ 674.948815] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 674.952924] env[68217]: DEBUG oslo_vmware.api [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960849, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 674.961474] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960850, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.070747] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.086550] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "9ac81867-311c-42f3-b38f-67dc10f409c0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.087151] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "9ac81867-311c-42f3-b38f-67dc10f409c0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.087267] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "9ac81867-311c-42f3-b38f-67dc10f409c0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 675.087518] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "9ac81867-311c-42f3-b38f-67dc10f409c0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 675.087773] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "9ac81867-311c-42f3-b38f-67dc10f409c0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 675.090643] env[68217]: INFO nova.compute.manager [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Terminating instance [ 675.170894] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960844, 'name': Rename_Task, 'duration_secs': 0.163138} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.171246] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 675.171556] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-01f2c547-0c61-4f9a-9f74-22b07eab2efc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.184045] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Waiting for the task: (returnval){ [ 675.184045] env[68217]: value = "task-2960851" [ 675.184045] env[68217]: _type = "Task" [ 675.184045] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.194095] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960851, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.347585] env[68217]: DEBUG oslo_vmware.api [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Task: {'id': task-2960846, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.122011} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.348558] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 675.348558] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 675.348558] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 675.348558] env[68217]: INFO nova.compute.manager [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Took 1.08 seconds to destroy the instance on the hypervisor. 
[ 675.348778] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 675.348894] env[68217]: DEBUG nova.compute.manager [-] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 675.348991] env[68217]: DEBUG nova.network.neutron [-] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 675.371810] env[68217]: DEBUG nova.network.neutron [-] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 675.378046] env[68217]: DEBUG oslo_vmware.api [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960848, 'name': PowerOffVM_Task, 'duration_secs': 0.210451} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.381836] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 675.382071] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 675.382368] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960847, 'name': Destroy_Task, 'duration_secs': 0.416515} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.385951] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bbf5e87a-7e23-4dbb-a84f-3436b4b49281 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.387696] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Destroyed the VM [ 675.389011] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 675.389905] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4f95b0eb-a973-463d-93b8-ef3daf84aa24 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.399501] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the task: (returnval){ [ 675.399501] env[68217]: value = "task-2960852" [ 675.399501] env[68217]: _type = "Task" [ 675.399501] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.414784] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960852, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.456869] env[68217]: DEBUG oslo_vmware.api [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960849, 'name': PowerOffVM_Task, 'duration_secs': 0.201457} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.460976] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 675.461245] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 675.464406] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c37495bc-10d4-4bd1-934b-1def80405282 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.472306] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 675.472583] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 675.472774] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Deleting the datastore file [datastore1] 63e0fc9e-5182-4781-b007-69e2134718df {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 675.476451] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b73f3249-0b4d-4e84-9c4f-8cacc2c621f7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.478753] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960850, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.487473] env[68217]: DEBUG oslo_vmware.api [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 675.487473] env[68217]: value = "task-2960855" [ 675.487473] env[68217]: _type = "Task" [ 675.487473] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.504132] env[68217]: DEBUG oslo_vmware.api [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960855, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.533547] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 675.533799] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 675.533979] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Deleting the datastore file [datastore1] 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 675.537729] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-410bf4d6-feaf-4cde-9544-f9a6e77a5b48 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.545045] env[68217]: DEBUG oslo_vmware.api [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for the task: (returnval){ [ 675.545045] env[68217]: value = "task-2960856" [ 675.545045] env[68217]: _type = "Task" [ 675.545045] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.555747] env[68217]: DEBUG oslo_vmware.api [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960856, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.598192] env[68217]: DEBUG nova.compute.manager [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 675.598192] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 675.599815] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0c7bbb-1820-41ad-8e29-10aacc5d7b90 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.607815] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 675.609035] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-57e77eb2-43d4-4201-b419-821875618a3f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.611445] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ef7abf6-2798-4ada-8961-a7a424b10862 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.620487] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f52df45d-090d-4f3d-911c-0fd00c482a47 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.623838] env[68217]: DEBUG oslo_vmware.api [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 675.623838] env[68217]: value = "task-2960857" [ 675.623838] env[68217]: _type = "Task" [ 675.623838] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.658709] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-325baa17-df70-4e21-b286-988c0ca7355d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.664578] env[68217]: DEBUG oslo_vmware.api [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960857, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.669748] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6603ee7-bcb1-4bf0-ba25-06966562a447 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.685415] env[68217]: DEBUG nova.compute.provider_tree [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 675.695102] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960851, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.883038] env[68217]: DEBUG nova.network.neutron [-] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.908730] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960852, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.967394] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960850, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.551551} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.967757] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] cf457d43-b939-4284-b84d-9075895e9dda/cf457d43-b939-4284-b84d-9075895e9dda.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 675.967919] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 675.968211] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0111a6ab-dc16-4e4b-bd6c-6d4f4e384461 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.974668] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for the task: (returnval){ [ 675.974668] env[68217]: value = "task-2960858" [ 675.974668] env[68217]: _type = "Task" [ 675.974668] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 675.983165] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960858, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 675.996998] env[68217]: DEBUG oslo_vmware.api [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960855, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.192346} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 675.997259] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 675.997441] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 675.997617] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 675.997788] env[68217]: INFO nova.compute.manager [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Took 1.15 seconds to destroy the instance on the hypervisor. [ 675.998034] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 675.998303] env[68217]: DEBUG nova.compute.manager [-] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 675.998421] env[68217]: DEBUG nova.network.neutron [-] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 676.054443] env[68217]: DEBUG oslo_vmware.api [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960856, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.134226] env[68217]: DEBUG oslo_vmware.api [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960857, 'name': PowerOffVM_Task, 'duration_secs': 0.241353} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.134508] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 676.134688] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 676.134944] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cedb096d-4b96-4d9f-82a2-e989a7511ac6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.191467] env[68217]: DEBUG nova.scheduler.client.report [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 676.199849] env[68217]: DEBUG oslo_vmware.api [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960851, 'name': PowerOnVM_Task, 'duration_secs': 0.642254} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.200500] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 676.200636] env[68217]: INFO nova.compute.manager [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Took 9.01 seconds to spawn the instance on the hypervisor. 
[ 676.200821] env[68217]: DEBUG nova.compute.manager [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 676.201666] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ef0e96-e753-4e42-b63a-0a5a839f4cb5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.241383] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 676.241985] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 676.241985] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Deleting the datastore file [datastore2] 9ac81867-311c-42f3-b38f-67dc10f409c0 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 676.242343] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d2778dab-4ff3-4de0-b059-a05445a97e73 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.250023] env[68217]: DEBUG oslo_vmware.api [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 676.250023] env[68217]: value = "task-2960860" [ 676.250023] env[68217]: _type = "Task" [ 676.250023] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.259652] env[68217]: DEBUG oslo_vmware.api [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960860, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.329699] env[68217]: DEBUG nova.compute.manager [req-c3eb85be-69af-4082-a2b0-66ec5580eaa6 req-0bbc1acc-0161-4e09-9098-4e0018643539 service nova] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Received event network-vif-deleted-c4d2ead2-9ca2-4172-bf51-fd9673282e6b {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 676.330034] env[68217]: INFO nova.compute.manager [req-c3eb85be-69af-4082-a2b0-66ec5580eaa6 req-0bbc1acc-0161-4e09-9098-4e0018643539 service nova] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Neutron deleted interface c4d2ead2-9ca2-4172-bf51-fd9673282e6b; detaching it from the instance and deleting it from the info cache [ 676.330095] env[68217]: DEBUG nova.network.neutron [req-c3eb85be-69af-4082-a2b0-66ec5580eaa6 req-0bbc1acc-0161-4e09-9098-4e0018643539 service nova] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.385409] env[68217]: INFO nova.compute.manager [-] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Took 1.04 seconds to deallocate network for instance. [ 676.414010] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960852, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.485320] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960858, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.276782} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.485643] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 676.486565] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61dbb1b9-4203-4282-a965-77ce98732366 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.511652] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Reconfiguring VM instance instance-0000001d to attach disk [datastore1] cf457d43-b939-4284-b84d-9075895e9dda/cf457d43-b939-4284-b84d-9075895e9dda.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 676.511982] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-68560fad-cf59-4ff7-b5e4-9c6691eed055 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.531799] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for the task: (returnval){ [ 676.531799] env[68217]: value = "task-2960861" [ 676.531799] env[68217]: _type = "Task" [ 676.531799] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 676.539435] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960861, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 676.553035] env[68217]: DEBUG oslo_vmware.api [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Task: {'id': task-2960856, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.534483} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.553309] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 676.553506] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 676.553684] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 676.553883] env[68217]: INFO nova.compute.manager [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Took 1.64 seconds to destroy the instance on the hypervisor. [ 676.554151] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 676.554347] env[68217]: DEBUG nova.compute.manager [-] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 676.554443] env[68217]: DEBUG nova.network.neutron [-] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 676.702011] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.670s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 676.702882] env[68217]: DEBUG nova.compute.manager [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 676.705513] env[68217]: DEBUG oslo_concurrency.lockutils [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.958s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 676.706969] env[68217]: INFO nova.compute.claims [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 676.726195] env[68217]: INFO nova.compute.manager [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Took 44.91 seconds to build instance. [ 676.738293] env[68217]: DEBUG nova.network.neutron [-] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.769535] env[68217]: DEBUG oslo_vmware.api [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960860, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.510436} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.769943] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 676.770273] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 676.770568] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 676.770869] env[68217]: INFO nova.compute.manager [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Took 1.17 seconds to destroy the instance on the hypervisor. [ 676.771340] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 676.772413] env[68217]: DEBUG nova.compute.manager [-] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 676.772596] env[68217]: DEBUG nova.network.neutron [-] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 676.833663] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e348cef2-e772-4028-b6c4-9473b8271359 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.846025] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb0b0b8-3aea-4760-a423-06dfaac08687 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.882556] env[68217]: DEBUG nova.compute.manager [req-c3eb85be-69af-4082-a2b0-66ec5580eaa6 req-0bbc1acc-0161-4e09-9098-4e0018643539 service nova] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Detach interface failed, port_id=c4d2ead2-9ca2-4172-bf51-fd9673282e6b, reason: Instance 63e0fc9e-5182-4781-b007-69e2134718df could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 676.891947] env[68217]: DEBUG oslo_concurrency.lockutils [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 676.909938] env[68217]: DEBUG oslo_vmware.api [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960852, 'name': RemoveSnapshot_Task, 'duration_secs': 1.442854} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 676.910401] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 676.910725] env[68217]: INFO nova.compute.manager [None req-352d4b5f-6365-4611-b7ea-6e8f6f62313d tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Took 16.84 seconds to snapshot the instance on the hypervisor. [ 677.042338] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960861, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.213738] env[68217]: DEBUG nova.compute.utils [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 677.216568] env[68217]: DEBUG nova.compute.manager [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 677.216568] env[68217]: DEBUG nova.network.neutron [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 677.225725] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c5564e5-4c64-40cf-bd91-da21387e0d6b tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Lock "366c780a-2870-4e6e-8cfe-7eec10c363d5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 75.473s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.244165] env[68217]: INFO nova.compute.manager [-] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Took 1.25 seconds to deallocate network for instance. 
[ 677.269323] env[68217]: DEBUG nova.compute.manager [req-90d12f72-2c9b-4fae-8214-eecf7dc2a594 req-c13692c1-bd0e-4dae-b8d2-06057f731df3 service nova] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Received event network-vif-deleted-be83d87a-5096-4216-a402-35655f496cc0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 677.269510] env[68217]: INFO nova.compute.manager [req-90d12f72-2c9b-4fae-8214-eecf7dc2a594 req-c13692c1-bd0e-4dae-b8d2-06057f731df3 service nova] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Neutron deleted interface be83d87a-5096-4216-a402-35655f496cc0; detaching it from the instance and deleting it from the info cache [ 677.269765] env[68217]: DEBUG nova.network.neutron [req-90d12f72-2c9b-4fae-8214-eecf7dc2a594 req-c13692c1-bd0e-4dae-b8d2-06057f731df3 service nova] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.290318] env[68217]: DEBUG nova.policy [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '554b6b3d22404c0ba52c739b3c7b98a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8fb62d18446841a3b2a6ac25ab5dc869', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 677.337745] env[68217]: DEBUG nova.network.neutron [-] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.545185] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960861, 'name': ReconfigVM_Task, 'duration_secs': 0.863197} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 677.545185] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Reconfigured VM instance instance-0000001d to attach disk [datastore1] cf457d43-b939-4284-b84d-9075895e9dda/cf457d43-b939-4284-b84d-9075895e9dda.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 677.545395] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-cd43c6a7-d0ac-4230-868b-0a536e4c57e3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.551313] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for the task: (returnval){ [ 677.551313] env[68217]: value = "task-2960862" [ 677.551313] env[68217]: _type = "Task" [ 677.551313] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 677.559855] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960862, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 677.564165] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Acquiring lock "366c780a-2870-4e6e-8cfe-7eec10c363d5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.564165] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Lock "366c780a-2870-4e6e-8cfe-7eec10c363d5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.564650] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Acquiring lock "366c780a-2870-4e6e-8cfe-7eec10c363d5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.564884] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Lock "366c780a-2870-4e6e-8cfe-7eec10c363d5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 677.565112] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Lock "366c780a-2870-4e6e-8cfe-7eec10c363d5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 677.567093] env[68217]: INFO nova.compute.manager [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Terminating instance [ 677.579478] env[68217]: DEBUG nova.network.neutron [-] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.720807] env[68217]: DEBUG nova.compute.manager [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 677.729607] env[68217]: DEBUG nova.compute.manager [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 677.753120] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 677.773180] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5b253236-7f61-43a6-bd88-d2aa987809cb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.778274] env[68217]: DEBUG nova.network.neutron [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Successfully created port: 105398c1-34ae-4691-9fb2-8cf58f067ec3 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 677.789773] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e18b27b6-cabc-4270-96f9-38da25bee8d9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 677.824102] env[68217]: DEBUG nova.compute.manager [req-90d12f72-2c9b-4fae-8214-eecf7dc2a594 req-c13692c1-bd0e-4dae-b8d2-06057f731df3 service nova] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Detach interface failed, port_id=be83d87a-5096-4216-a402-35655f496cc0, reason: Instance 9ac81867-311c-42f3-b38f-67dc10f409c0 could not be found. 
{{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 677.841277] env[68217]: INFO nova.compute.manager [-] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Took 1.29 seconds to deallocate network for instance. [ 678.066655] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960862, 'name': Rename_Task, 'duration_secs': 0.168023} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.066926] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 678.066960] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dbfd832d-cc0d-42d6-a604-e3c7d6cf1c4a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.074484] env[68217]: DEBUG nova.compute.manager [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 678.074685] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 678.075621] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5616841-aa62-4211-85ee-6deff7fcaef6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.084278] env[68217]: INFO nova.compute.manager [-] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Took 1.31 seconds to deallocate network for instance. [ 678.084613] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 678.087119] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a352b3ce-0250-48d1-b631-ccd1cea0939b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.089172] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for the task: (returnval){ [ 678.089172] env[68217]: value = "task-2960863" [ 678.089172] env[68217]: _type = "Task" [ 678.089172] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.099263] env[68217]: DEBUG oslo_vmware.api [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Waiting for the task: (returnval){ [ 678.099263] env[68217]: value = "task-2960864" [ 678.099263] env[68217]: _type = "Task" [ 678.099263] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.103091] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960863, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.112558] env[68217]: DEBUG oslo_vmware.api [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960864, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.252067] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.321140] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39b6606a-aa1a-4e7f-8687-3b5c429b61cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.328524] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5d2bdbd-65d1-4ce9-8860-9ecfdc72ae96 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.360689] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.361834] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9f8934c-bed6-4bf0-b670-a888c1a5fab1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.369039] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf94eecc-69e3-4503-97ad-8dac1abbd0ec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.382027] env[68217]: DEBUG nova.compute.provider_tree [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.560298] env[68217]: DEBUG nova.compute.manager [req-93b97977-5168-4b71-9442-8bb2d832a4dd req-78392904-dced-4d7a-8da3-8410d72f0ed3 service nova] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Received event network-vif-deleted-746296a3-f700-4dff-823d-7b979c3ed19d {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 678.595751] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 678.606133] env[68217]: DEBUG oslo_vmware.api [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960863, 'name': PowerOnVM_Task, 'duration_secs': 0.520416} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.609197] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 678.609407] env[68217]: INFO nova.compute.manager [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Took 8.68 seconds to spawn the instance on the hypervisor. [ 678.609582] env[68217]: DEBUG nova.compute.manager [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 678.610754] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3955ddc8-e731-4500-8b05-14adb56a6631 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.620838] env[68217]: DEBUG oslo_vmware.api [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960864, 'name': PowerOffVM_Task, 'duration_secs': 0.227367} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 678.620838] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 678.620838] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 678.622946] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-89fa8650-0e34-409b-a883-ae95d8f14db1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.696124] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 678.696360] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 678.696537] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Deleting the datastore file [datastore2] 366c780a-2870-4e6e-8cfe-7eec10c363d5 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 678.696794] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1f69dd6-bbea-49c9-acae-ee2979688ad5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.704171] env[68217]: DEBUG oslo_vmware.api [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Waiting for the task: (returnval){ [ 678.704171] env[68217]: value = "task-2960866" [ 678.704171] env[68217]: _type = "Task" [ 678.704171] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 678.712389] env[68217]: DEBUG oslo_vmware.api [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960866, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 678.731596] env[68217]: DEBUG nova.compute.manager [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 678.757169] env[68217]: DEBUG nova.virt.hardware [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 678.757329] env[68217]: DEBUG nova.virt.hardware [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 678.757465] env[68217]: DEBUG nova.virt.hardware [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 678.757644] env[68217]: DEBUG nova.virt.hardware [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 678.757783] env[68217]: DEBUG nova.virt.hardware [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 678.757923] env[68217]: DEBUG nova.virt.hardware [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 678.758139] env[68217]: DEBUG nova.virt.hardware [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 678.758295] env[68217]: DEBUG nova.virt.hardware [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 678.758453] env[68217]: DEBUG nova.virt.hardware [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 678.758611] env[68217]: DEBUG nova.virt.hardware [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 678.758784] env[68217]: DEBUG nova.virt.hardware [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 678.759712] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cc944a9-a9b4-4931-abf5-d1af815f15e1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.768750] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-157db104-74a7-4b96-85a3-6ea69b0a3910 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.885504] env[68217]: DEBUG nova.scheduler.client.report [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 679.131239] env[68217]: INFO nova.compute.manager [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Took 47.14 seconds to build instance. [ 679.216622] env[68217]: DEBUG oslo_vmware.api [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960866, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 679.370172] env[68217]: DEBUG nova.compute.manager [req-971c0e67-f8d2-47e2-91aa-16e6d32f40cb req-3a1af555-680a-4c2d-a6f2-0e74e320d2fb service nova] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Received event network-vif-plugged-105398c1-34ae-4691-9fb2-8cf58f067ec3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 679.370172] env[68217]: DEBUG oslo_concurrency.lockutils [req-971c0e67-f8d2-47e2-91aa-16e6d32f40cb req-3a1af555-680a-4c2d-a6f2-0e74e320d2fb service nova] Acquiring lock "1675982e-0702-482b-9fe6-fd4eb9d83311-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 679.370172] env[68217]: DEBUG oslo_concurrency.lockutils [req-971c0e67-f8d2-47e2-91aa-16e6d32f40cb req-3a1af555-680a-4c2d-a6f2-0e74e320d2fb service nova] Lock "1675982e-0702-482b-9fe6-fd4eb9d83311-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.370172] env[68217]: DEBUG oslo_concurrency.lockutils [req-971c0e67-f8d2-47e2-91aa-16e6d32f40cb req-3a1af555-680a-4c2d-a6f2-0e74e320d2fb service nova] Lock "1675982e-0702-482b-9fe6-fd4eb9d83311-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.370172] env[68217]: DEBUG nova.compute.manager [req-971c0e67-f8d2-47e2-91aa-16e6d32f40cb req-3a1af555-680a-4c2d-a6f2-0e74e320d2fb service nova] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] No waiting events found dispatching network-vif-plugged-105398c1-34ae-4691-9fb2-8cf58f067ec3 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 679.370388] env[68217]: WARNING nova.compute.manager [req-971c0e67-f8d2-47e2-91aa-16e6d32f40cb req-3a1af555-680a-4c2d-a6f2-0e74e320d2fb service nova] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Received unexpected event network-vif-plugged-105398c1-34ae-4691-9fb2-8cf58f067ec3 for instance with vm_state building and task_state spawning. [ 679.392783] env[68217]: DEBUG oslo_concurrency.lockutils [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.685s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.392783] env[68217]: DEBUG nova.compute.manager [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 679.394918] env[68217]: DEBUG oslo_concurrency.lockutils [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 31.827s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 679.397961] env[68217]: INFO nova.compute.claims [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 679.465827] env[68217]: DEBUG nova.compute.manager [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 679.466806] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd6d25d2-5546-4f29-bce7-300245191e36 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 679.480538] env[68217]: DEBUG nova.network.neutron [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Successfully updated port: 105398c1-34ae-4691-9fb2-8cf58f067ec3 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 679.633170] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40b81a81-c8ea-441b-9460-b625c3205baa tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Lock "cf457d43-b939-4284-b84d-9075895e9dda" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 76.624s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 679.716167] env[68217]: DEBUG oslo_vmware.api [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Task: {'id': task-2960866, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.643284} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 679.716167] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 679.716167] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 679.716426] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 679.716534] env[68217]: INFO nova.compute.manager [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Took 1.64 seconds to destroy the instance on the hypervisor. [ 679.716721] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 679.716909] env[68217]: DEBUG nova.compute.manager [-] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 679.716998] env[68217]: DEBUG nova.network.neutron [-] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 679.907779] env[68217]: DEBUG nova.compute.utils [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 679.913105] env[68217]: DEBUG nova.compute.manager [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 679.913105] env[68217]: DEBUG nova.network.neutron [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 679.958984] env[68217]: DEBUG nova.policy [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59eda9327200478b833b9b9f823ec1b9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2a12aa1bfa73469096139bdc472689ef', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 679.983770] env[68217]: INFO nova.compute.manager [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] instance snapshotting [ 679.985922] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "refresh_cache-1675982e-0702-482b-9fe6-fd4eb9d83311" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 679.986073] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "refresh_cache-1675982e-0702-482b-9fe6-fd4eb9d83311" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 679.986220] env[68217]: DEBUG nova.network.neutron [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 679.988011] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-958ab4b0-08d7-4f1c-9d78-e695e5dc37f2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.010557] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a92159-491e-45c4-bcf1-1e6bfeb39546 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.135308] env[68217]: DEBUG nova.compute.manager [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 680.167758] env[68217]: DEBUG oslo_concurrency.lockutils [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquiring lock "cf457d43-b939-4284-b84d-9075895e9dda" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.167758] env[68217]: DEBUG oslo_concurrency.lockutils [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Lock "cf457d43-b939-4284-b84d-9075895e9dda" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.167921] env[68217]: DEBUG oslo_concurrency.lockutils [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquiring lock "cf457d43-b939-4284-b84d-9075895e9dda-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.168100] env[68217]: DEBUG oslo_concurrency.lockutils [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Lock "cf457d43-b939-4284-b84d-9075895e9dda-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 680.168304] env[68217]: DEBUG oslo_concurrency.lockutils [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Lock "cf457d43-b939-4284-b84d-9075895e9dda-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 680.170435] env[68217]: INFO nova.compute.manager [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Terminating instance [ 680.247163] env[68217]: DEBUG nova.network.neutron [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Successfully created port: 6da8ccf1-d23a-4ebd-8053-78f80e526699 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 680.410999] env[68217]: DEBUG nova.compute.manager [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 680.522660] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 680.523240] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-e0543134-3adb-4959-8301-95ce37b8203e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.527679] env[68217]: DEBUG nova.network.neutron [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 680.531878] env[68217]: DEBUG oslo_vmware.api [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the task: (returnval){ [ 680.531878] env[68217]: value = "task-2960867" [ 680.531878] env[68217]: _type = "Task" [ 680.531878] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.542983] env[68217]: DEBUG oslo_vmware.api [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960867, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.547428] env[68217]: DEBUG nova.network.neutron [-] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.654555] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 680.674439] env[68217]: DEBUG nova.compute.manager [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 680.674667] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 680.675706] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-379ca524-ab6e-4994-a190-af119a50754f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.683032] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 680.683273] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e6b2556e-f094-4a62-b68b-cfb07d30f2dd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.692666] env[68217]: DEBUG oslo_vmware.api [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for the task: (returnval){ [ 680.692666] env[68217]: value = "task-2960868" [ 680.692666] env[68217]: _type = "Task" [ 680.692666] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 680.705562] env[68217]: DEBUG oslo_vmware.api [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960868, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 680.786924] env[68217]: DEBUG nova.network.neutron [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Updating instance_info_cache with network_info: [{"id": "105398c1-34ae-4691-9fb2-8cf58f067ec3", "address": "fa:16:3e:41:10:0c", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap105398c1-34", "ovs_interfaceid": "105398c1-34ae-4691-9fb2-8cf58f067ec3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 680.877547] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f22efda3-f152-418b-83e3-bdd65e716036 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.884539] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81229bd-201e-4c78-bc7d-0c6c482254a3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.917310] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e810a0c-f3d1-4df9-adf2-8d743c89b75d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.929107] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-293433e8-99c7-4ab9-aa84-d37d5bcaad7e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.944024] env[68217]: DEBUG nova.compute.provider_tree [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 681.047913] env[68217]: DEBUG oslo_vmware.api [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960867, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.051306] env[68217]: INFO nova.compute.manager [-] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Took 1.33 seconds to deallocate network for instance. [ 681.203403] env[68217]: DEBUG oslo_vmware.api [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960868, 'name': PowerOffVM_Task, 'duration_secs': 0.193581} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.203723] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 681.203842] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 681.204114] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe23761e-7e46-4248-a9b3-e9049071e038 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.269483] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 681.269770] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 681.270344] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Deleting the datastore file [datastore1] cf457d43-b939-4284-b84d-9075895e9dda {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 681.270344] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e01fb6e-97a7-4660-b3f3-73aa46017ff5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.277322] env[68217]: DEBUG oslo_vmware.api [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for the task: (returnval){ [ 681.277322] env[68217]: value = "task-2960870" [ 681.277322] env[68217]: _type = "Task" [ 681.277322] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.285308] env[68217]: DEBUG oslo_vmware.api [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960870, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.290208] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "refresh_cache-1675982e-0702-482b-9fe6-fd4eb9d83311" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 681.290208] env[68217]: DEBUG nova.compute.manager [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Instance network_info: |[{"id": "105398c1-34ae-4691-9fb2-8cf58f067ec3", "address": "fa:16:3e:41:10:0c", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap105398c1-34", "ovs_interfaceid": "105398c1-34ae-4691-9fb2-8cf58f067ec3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 681.290326] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:10:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '02bbcead-d833-4543-bec6-fb82dfe659ff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '105398c1-34ae-4691-9fb2-8cf58f067ec3', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 681.297817] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Creating folder: Project (8fb62d18446841a3b2a6ac25ab5dc869). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 681.298078] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-092111e4-0fda-4167-8264-65d8c1f0d4d8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.311091] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Created folder: Project (8fb62d18446841a3b2a6ac25ab5dc869) in parent group-v594094. [ 681.311288] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Creating folder: Instances. Parent ref: group-v594179. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 681.311520] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c117c0cb-845b-4bdc-baed-bf8266e0f6a5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.322072] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Created folder: Instances in parent group-v594179. [ 681.322326] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 681.322507] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 681.322724] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82489d48-3fb4-4d1f-93da-3920443f0a99 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.341886] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 681.341886] env[68217]: value = "task-2960873" [ 681.341886] env[68217]: _type = "Task" [ 681.341886] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.352178] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960873, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.396979] env[68217]: DEBUG nova.compute.manager [req-bb74b172-2d58-4c28-ad49-477ec8fedeb2 req-63522e9a-4505-4910-b055-7f4b967e29b3 service nova] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Received event network-changed-105398c1-34ae-4691-9fb2-8cf58f067ec3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 681.397196] env[68217]: DEBUG nova.compute.manager [req-bb74b172-2d58-4c28-ad49-477ec8fedeb2 req-63522e9a-4505-4910-b055-7f4b967e29b3 service nova] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Refreshing instance network info cache due to event network-changed-105398c1-34ae-4691-9fb2-8cf58f067ec3. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 681.397413] env[68217]: DEBUG oslo_concurrency.lockutils [req-bb74b172-2d58-4c28-ad49-477ec8fedeb2 req-63522e9a-4505-4910-b055-7f4b967e29b3 service nova] Acquiring lock "refresh_cache-1675982e-0702-482b-9fe6-fd4eb9d83311" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.397551] env[68217]: DEBUG oslo_concurrency.lockutils [req-bb74b172-2d58-4c28-ad49-477ec8fedeb2 req-63522e9a-4505-4910-b055-7f4b967e29b3 service nova] Acquired lock "refresh_cache-1675982e-0702-482b-9fe6-fd4eb9d83311" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.397752] env[68217]: DEBUG nova.network.neutron [req-bb74b172-2d58-4c28-ad49-477ec8fedeb2 req-63522e9a-4505-4910-b055-7f4b967e29b3 service nova] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Refreshing network info cache for port 105398c1-34ae-4691-9fb2-8cf58f067ec3 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 681.425644] env[68217]: DEBUG nova.compute.manager [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 681.447208] env[68217]: DEBUG nova.virt.hardware [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 681.447446] env[68217]: DEBUG nova.virt.hardware [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 681.447600] env[68217]: DEBUG nova.virt.hardware [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 681.447779] env[68217]: DEBUG nova.virt.hardware [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 681.447921] env[68217]: DEBUG nova.virt.hardware [None 
req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 681.448077] env[68217]: DEBUG nova.virt.hardware [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 681.448281] env[68217]: DEBUG nova.virt.hardware [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 681.448436] env[68217]: DEBUG nova.virt.hardware [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 681.448597] env[68217]: DEBUG nova.virt.hardware [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 681.448752] env[68217]: DEBUG nova.virt.hardware [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 681.448999] env[68217]: DEBUG nova.virt.hardware [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 681.449848] env[68217]: DEBUG nova.scheduler.client.report [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 681.453458] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93199e4a-519d-4087-83fc-95c9da06eb4a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.461353] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cfa37b71-bdc7-48f5-b44d-08aeaaf9feab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.545022] env[68217]: DEBUG oslo_vmware.api [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960867, 'name': CreateSnapshot_Task, 'duration_secs': 0.515417} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.545329] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 681.546068] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da07bbc-8594-41a3-9142-2b268bdceec0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.560939] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.605250] env[68217]: DEBUG nova.compute.manager [req-f58fa89f-3324-49a3-b86c-5a62e61574db req-ca7db373-75f5-4cba-951a-508eb934a1eb service nova] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Received event network-vif-plugged-6da8ccf1-d23a-4ebd-8053-78f80e526699 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 681.605470] env[68217]: DEBUG oslo_concurrency.lockutils [req-f58fa89f-3324-49a3-b86c-5a62e61574db req-ca7db373-75f5-4cba-951a-508eb934a1eb service nova] Acquiring lock "a4a88f10-937a-4fa6-aa15-eb7f669e77d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 681.605674] env[68217]: DEBUG oslo_concurrency.lockutils [req-f58fa89f-3324-49a3-b86c-5a62e61574db req-ca7db373-75f5-4cba-951a-508eb934a1eb service nova] Lock "a4a88f10-937a-4fa6-aa15-eb7f669e77d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.605838] env[68217]: DEBUG oslo_concurrency.lockutils [req-f58fa89f-3324-49a3-b86c-5a62e61574db req-ca7db373-75f5-4cba-951a-508eb934a1eb service nova] Lock "a4a88f10-937a-4fa6-aa15-eb7f669e77d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.606013] env[68217]: DEBUG nova.compute.manager [req-f58fa89f-3324-49a3-b86c-5a62e61574db req-ca7db373-75f5-4cba-951a-508eb934a1eb service nova] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] No waiting events found dispatching network-vif-plugged-6da8ccf1-d23a-4ebd-8053-78f80e526699 {{(pid=68217) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 681.606313] env[68217]: WARNING nova.compute.manager [req-f58fa89f-3324-49a3-b86c-5a62e61574db req-ca7db373-75f5-4cba-951a-508eb934a1eb service nova] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Received unexpected event network-vif-plugged-6da8ccf1-d23a-4ebd-8053-78f80e526699 for instance with vm_state building and task_state spawning. [ 681.686655] env[68217]: DEBUG nova.network.neutron [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Successfully updated port: 6da8ccf1-d23a-4ebd-8053-78f80e526699 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 681.787535] env[68217]: DEBUG oslo_vmware.api [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Task: {'id': task-2960870, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150182} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.787810] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 681.788011] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 681.788201] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 681.788377] env[68217]: INFO nova.compute.manager [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Took 1.11 seconds to destroy the instance on the hypervisor. [ 681.788606] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 681.788790] env[68217]: DEBUG nova.compute.manager [-] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 681.788992] env[68217]: DEBUG nova.network.neutron [-] [instance: cf457d43-b939-4284-b84d-9075895e9dda] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 681.853050] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960873, 'name': CreateVM_Task, 'duration_secs': 0.344793} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 681.853170] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 681.857227] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.857227] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 681.857227] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 681.857227] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-269d4f4b-9299-4a70-95a8-e75ad6a23dd7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.859244] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 681.859244] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ecbb99-3721-165d-ad79-4def4462a0b2" [ 681.859244] env[68217]: _type = "Task" [ 681.859244] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 681.868882] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ecbb99-3721-165d-ad79-4def4462a0b2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 681.957486] env[68217]: DEBUG oslo_concurrency.lockutils [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.562s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 681.958049] env[68217]: DEBUG nova.compute.manager [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 681.961621] env[68217]: DEBUG oslo_concurrency.lockutils [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 30.794s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 681.963155] env[68217]: INFO nova.compute.claims [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 682.063759] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 682.064442] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-af4feb2e-ce5d-4611-890f-12779c2f69d3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.074044] env[68217]: DEBUG oslo_vmware.api [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the task: (returnval){ [ 682.074044] env[68217]: value = "task-2960874" [ 682.074044] env[68217]: _type = "Task" [ 682.074044] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.082114] env[68217]: DEBUG oslo_vmware.api [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960874, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.191179] env[68217]: DEBUG oslo_concurrency.lockutils [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Acquiring lock "refresh_cache-a4a88f10-937a-4fa6-aa15-eb7f669e77d0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.191388] env[68217]: DEBUG oslo_concurrency.lockutils [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Acquired lock "refresh_cache-a4a88f10-937a-4fa6-aa15-eb7f669e77d0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 682.191489] env[68217]: DEBUG nova.network.neutron [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 682.198090] env[68217]: DEBUG nova.network.neutron [req-bb74b172-2d58-4c28-ad49-477ec8fedeb2 req-63522e9a-4505-4910-b055-7f4b967e29b3 service nova] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Updated VIF entry in instance network info cache for port 105398c1-34ae-4691-9fb2-8cf58f067ec3. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 682.198448] env[68217]: DEBUG nova.network.neutron [req-bb74b172-2d58-4c28-ad49-477ec8fedeb2 req-63522e9a-4505-4910-b055-7f4b967e29b3 service nova] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Updating instance_info_cache with network_info: [{"id": "105398c1-34ae-4691-9fb2-8cf58f067ec3", "address": "fa:16:3e:41:10:0c", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap105398c1-34", "ovs_interfaceid": "105398c1-34ae-4691-9fb2-8cf58f067ec3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.369271] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ecbb99-3721-165d-ad79-4def4462a0b2, 'name': SearchDatastore_Task, 'duration_secs': 0.009447} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.369562] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.369820] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 682.370065] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 682.370214] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 682.370389] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 682.370651] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6b2423b-51f3-454b-a4e6-2882eb2e8673 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.378759] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 682.379021] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 682.379793] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4922402e-6be1-4593-9438-542155db100a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.386842] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 682.386842] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c9b6e3-03c1-b406-66ae-74e8a20399a9" [ 682.386842] env[68217]: _type = "Task" [ 682.386842] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.394214] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c9b6e3-03c1-b406-66ae-74e8a20399a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.470802] env[68217]: DEBUG nova.compute.utils [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 682.473158] env[68217]: DEBUG nova.compute.manager [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 682.473339] env[68217]: DEBUG nova.network.neutron [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 682.522875] env[68217]: DEBUG nova.policy [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cec0c4325164a28a663f79559271d79', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '023b801c234d47d79cb57ea73058e81c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 682.535954] env[68217]: DEBUG nova.network.neutron [-] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.584280] env[68217]: DEBUG oslo_vmware.api [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960874, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.703896] env[68217]: DEBUG oslo_concurrency.lockutils [req-bb74b172-2d58-4c28-ad49-477ec8fedeb2 req-63522e9a-4505-4910-b055-7f4b967e29b3 service nova] Releasing lock "refresh_cache-1675982e-0702-482b-9fe6-fd4eb9d83311" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 682.704787] env[68217]: DEBUG nova.compute.manager [req-bb74b172-2d58-4c28-ad49-477ec8fedeb2 req-63522e9a-4505-4910-b055-7f4b967e29b3 service nova] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Received event network-vif-deleted-e6b77d68-0640-423e-8233-59c03f209bf7 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 682.726112] env[68217]: DEBUG nova.network.neutron [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 682.801951] env[68217]: DEBUG nova.network.neutron [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Successfully created port: 686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 682.856456] env[68217]: DEBUG nova.network.neutron [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Updating instance_info_cache with network_info: [{"id": "6da8ccf1-d23a-4ebd-8053-78f80e526699", "address": "fa:16:3e:80:24:d0", "network": {"id": "45d10375-0689-4266-8175-4c6cac851356", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-2078150535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a12aa1bfa73469096139bdc472689ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6da8ccf1-d2", "ovs_interfaceid": "6da8ccf1-d23a-4ebd-8053-78f80e526699", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.897688] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c9b6e3-03c1-b406-66ae-74e8a20399a9, 'name': SearchDatastore_Task, 'duration_secs': 0.009383} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 682.898505] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd5e32be-cca5-4c19-9cb9-6dc21784bcd9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.903860] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 682.903860] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52285495-e2b7-b5b9-64e2-55e0f5f1f283" [ 682.903860] env[68217]: _type = "Task" [ 682.903860] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 682.912419] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52285495-e2b7-b5b9-64e2-55e0f5f1f283, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 682.979149] env[68217]: DEBUG nova.compute.manager [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 683.039877] env[68217]: INFO nova.compute.manager [-] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Took 1.25 seconds to deallocate network for instance. [ 683.093358] env[68217]: DEBUG oslo_vmware.api [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960874, 'name': CloneVM_Task} progress is 95%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.360046] env[68217]: DEBUG oslo_concurrency.lockutils [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Releasing lock "refresh_cache-a4a88f10-937a-4fa6-aa15-eb7f669e77d0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.360046] env[68217]: DEBUG nova.compute.manager [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Instance network_info: |[{"id": "6da8ccf1-d23a-4ebd-8053-78f80e526699", "address": "fa:16:3e:80:24:d0", "network": {"id": "45d10375-0689-4266-8175-4c6cac851356", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-2078150535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a12aa1bfa73469096139bdc472689ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6da8ccf1-d2", "ovs_interfaceid": "6da8ccf1-d23a-4ebd-8053-78f80e526699", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 683.360240] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 
tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:80:24:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '271fe7a0-dfd7-409b-920a-cf83ef1a86a3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6da8ccf1-d23a-4ebd-8053-78f80e526699', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 683.368024] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Creating folder: Project (2a12aa1bfa73469096139bdc472689ef). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 683.370700] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-598a9519-3b21-4b15-b234-c61884b1fb8f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.390572] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Created folder: Project (2a12aa1bfa73469096139bdc472689ef) in parent group-v594094. [ 683.390572] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Creating folder: Instances. Parent ref: group-v594183. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 683.390737] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4e66bca0-f6c4-4723-8063-453d70fed14a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.424715] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52285495-e2b7-b5b9-64e2-55e0f5f1f283, 'name': SearchDatastore_Task, 'duration_secs': 0.01121} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.424981] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Created folder: Instances in parent group-v594183. [ 683.425235] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 683.425844] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 683.425844] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 1675982e-0702-482b-9fe6-fd4eb9d83311/1675982e-0702-482b-9fe6-fd4eb9d83311.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 683.425844] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 683.426103] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-27cd94c2-9a6e-4220-b142-09116aa2026b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.427955] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1c90f5f-c479-4a29-bffb-c53304303f99 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.450887] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 683.450887] env[68217]: value = "task-2960877" [ 683.450887] env[68217]: _type = "Task" [ 683.450887] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.452131] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 683.452131] env[68217]: value = "task-2960878" [ 683.452131] env[68217]: _type = "Task" [ 683.452131] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 683.466704] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2960877, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.469209] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960878, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.487798] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54b44665-c727-4f39-ae40-771ba613cb30 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.496553] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ad2e9c-62c6-405f-83e8-53acb05f66e8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.537367] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0344dbf3-1c15-45bc-99a4-ff9c104ee7ba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.547574] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e8a9c6-6a28-4f11-941c-5da98152aab1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.553642] env[68217]: DEBUG oslo_concurrency.lockutils [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 683.568044] env[68217]: DEBUG nova.compute.provider_tree [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 683.587913] env[68217]: DEBUG oslo_vmware.api [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960874, 'name': CloneVM_Task, 'duration_secs': 1.199024} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 683.588279] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Created linked-clone VM from snapshot [ 683.589106] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0efe4a51-48dd-4539-ae37-1ec4c3990b13 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.599866] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Uploading image 6bfbe66f-7dc2-4c4a-b095-09003f931d3a {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 683.632126] env[68217]: DEBUG oslo_vmware.rw_handles [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 683.632126] env[68217]: value = "vm-594182" [ 683.632126] env[68217]: _type = "VirtualMachine" [ 683.632126] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 683.632516] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0ef8e5f1-1db9-4ab0-8bd1-04de634e1723 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 683.641478] env[68217]: DEBUG oslo_vmware.rw_handles [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Lease: (returnval){ [ 683.641478] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529c4924-972c-8968-e56e-bc61ac44ccf7" [ 683.641478] env[68217]: _type = "HttpNfcLease" [ 683.641478] env[68217]: } obtained for exporting VM: (result){ [ 683.641478] env[68217]: value = "vm-594182" [ 683.641478] env[68217]: _type = "VirtualMachine" [ 683.641478] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 683.641977] env[68217]: DEBUG oslo_vmware.api [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the lease: (returnval){ [ 683.641977] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529c4924-972c-8968-e56e-bc61ac44ccf7" [ 683.641977] env[68217]: _type = "HttpNfcLease" [ 683.641977] env[68217]: } to be ready. {{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 683.651242] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 683.651242] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529c4924-972c-8968-e56e-bc61ac44ccf7" [ 683.651242] env[68217]: _type = "HttpNfcLease" [ 683.651242] env[68217]: } is initializing. 
{{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 683.925730] env[68217]: DEBUG nova.compute.manager [req-5e522436-ccba-4880-a8a0-1c8764c526a5 req-49f40e04-192b-4118-af7a-c3a743213365 service nova] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Received event network-changed-6da8ccf1-d23a-4ebd-8053-78f80e526699 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 683.925923] env[68217]: DEBUG nova.compute.manager [req-5e522436-ccba-4880-a8a0-1c8764c526a5 req-49f40e04-192b-4118-af7a-c3a743213365 service nova] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Refreshing instance network info cache due to event network-changed-6da8ccf1-d23a-4ebd-8053-78f80e526699. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 683.926184] env[68217]: DEBUG oslo_concurrency.lockutils [req-5e522436-ccba-4880-a8a0-1c8764c526a5 req-49f40e04-192b-4118-af7a-c3a743213365 service nova] Acquiring lock "refresh_cache-a4a88f10-937a-4fa6-aa15-eb7f669e77d0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 683.926312] env[68217]: DEBUG oslo_concurrency.lockutils [req-5e522436-ccba-4880-a8a0-1c8764c526a5 req-49f40e04-192b-4118-af7a-c3a743213365 service nova] Acquired lock "refresh_cache-a4a88f10-937a-4fa6-aa15-eb7f669e77d0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 683.926690] env[68217]: DEBUG nova.network.neutron [req-5e522436-ccba-4880-a8a0-1c8764c526a5 req-49f40e04-192b-4118-af7a-c3a743213365 service nova] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Refreshing network info cache for port 6da8ccf1-d23a-4ebd-8053-78f80e526699 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 683.972583] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2960877, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.972785] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960878, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 683.992772] env[68217]: DEBUG nova.compute.manager [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 684.014428] env[68217]: DEBUG nova.virt.hardware [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 684.014621] env[68217]: DEBUG nova.virt.hardware [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 684.014778] env[68217]: DEBUG nova.virt.hardware [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 684.014956] env[68217]: DEBUG nova.virt.hardware [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 684.015120] env[68217]: DEBUG nova.virt.hardware [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 684.015266] env[68217]: DEBUG nova.virt.hardware [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 684.015466] env[68217]: DEBUG nova.virt.hardware [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 684.015622] env[68217]: DEBUG nova.virt.hardware [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 684.015790] env[68217]: DEBUG 
nova.virt.hardware [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 684.015948] env[68217]: DEBUG nova.virt.hardware [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 684.016130] env[68217]: DEBUG nova.virt.hardware [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 684.016983] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b129520-65a5-4121-b5ce-bb5ddf4357b1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.028649] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55063f84-4805-481e-91ee-182bf3800564 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.091021] env[68217]: ERROR nova.scheduler.client.report [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [req-c1343dd3-f94e-4d0f-a5db-221b187b5450] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c1343dd3-f94e-4d0f-a5db-221b187b5450"}]} [ 684.107608] env[68217]: DEBUG nova.scheduler.client.report [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 684.122023] env[68217]: DEBUG nova.scheduler.client.report [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 684.122380] env[68217]: DEBUG nova.compute.provider_tree [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 684.133631] env[68217]: DEBUG nova.scheduler.client.report [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 684.150351] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 684.150351] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529c4924-972c-8968-e56e-bc61ac44ccf7" [ 684.150351] env[68217]: _type = "HttpNfcLease" [ 684.150351] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 684.150639] env[68217]: DEBUG oslo_vmware.rw_handles [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 684.150639] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529c4924-972c-8968-e56e-bc61ac44ccf7" [ 684.150639] env[68217]: _type = "HttpNfcLease" [ 684.150639] env[68217]: }. 
{{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 684.151385] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76df6b65-6177-466b-8a04-51440ba6d965 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.154442] env[68217]: DEBUG nova.scheduler.client.report [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 684.161896] env[68217]: DEBUG oslo_vmware.rw_handles [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f2314-869a-322e-b90b-95f742eb2b98/disk-0.vmdk from lease info. {{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 684.162095] env[68217]: DEBUG oslo_vmware.rw_handles [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f2314-869a-322e-b90b-95f742eb2b98/disk-0.vmdk for reading. {{(pid=68217) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 684.248975] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3a493e54-90d1-4482-afc2-d1f218cd7aed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.478460] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2960877, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538543} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.484384] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 1675982e-0702-482b-9fe6-fd4eb9d83311/1675982e-0702-482b-9fe6-fd4eb9d83311.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 684.484628] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 684.485012] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960878, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.485516] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5886b2ee-6f35-41d9-b67f-7fba11e771e1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.495649] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 684.495649] env[68217]: value = "task-2960880" [ 684.495649] env[68217]: _type = "Task" [ 684.495649] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.512425] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2960880, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 684.647103] env[68217]: DEBUG nova.compute.manager [req-ff72f372-41ef-4ee2-a57a-0ed06ec81ffb req-a451b849-9a59-41f7-a96e-53c41d1b73a8 service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Received event network-vif-plugged-686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 684.647534] env[68217]: DEBUG oslo_concurrency.lockutils [req-ff72f372-41ef-4ee2-a57a-0ed06ec81ffb req-a451b849-9a59-41f7-a96e-53c41d1b73a8 service nova] Acquiring lock "58c15727-79ae-404f-a054-d71e3be498cc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 684.647952] env[68217]: DEBUG oslo_concurrency.lockutils [req-ff72f372-41ef-4ee2-a57a-0ed06ec81ffb req-a451b849-9a59-41f7-a96e-53c41d1b73a8 service nova] Lock "58c15727-79ae-404f-a054-d71e3be498cc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 684.648327] env[68217]: DEBUG oslo_concurrency.lockutils [req-ff72f372-41ef-4ee2-a57a-0ed06ec81ffb req-a451b849-9a59-41f7-a96e-53c41d1b73a8 service nova] Lock "58c15727-79ae-404f-a054-d71e3be498cc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 684.648397] env[68217]: DEBUG nova.compute.manager [req-ff72f372-41ef-4ee2-a57a-0ed06ec81ffb req-a451b849-9a59-41f7-a96e-53c41d1b73a8 service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] No waiting events found dispatching network-vif-plugged-686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 684.648521] env[68217]: WARNING nova.compute.manager [req-ff72f372-41ef-4ee2-a57a-0ed06ec81ffb req-a451b849-9a59-41f7-a96e-53c41d1b73a8 service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Received unexpected event network-vif-plugged-686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 for instance with vm_state building and task_state spawning. 
[ 684.690338] env[68217]: DEBUG nova.network.neutron [req-5e522436-ccba-4880-a8a0-1c8764c526a5 req-49f40e04-192b-4118-af7a-c3a743213365 service nova] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Updated VIF entry in instance network info cache for port 6da8ccf1-d23a-4ebd-8053-78f80e526699. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 684.690594] env[68217]: DEBUG nova.network.neutron [req-5e522436-ccba-4880-a8a0-1c8764c526a5 req-49f40e04-192b-4118-af7a-c3a743213365 service nova] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Updating instance_info_cache with network_info: [{"id": "6da8ccf1-d23a-4ebd-8053-78f80e526699", "address": "fa:16:3e:80:24:d0", "network": {"id": "45d10375-0689-4266-8175-4c6cac851356", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-2078150535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a12aa1bfa73469096139bdc472689ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6da8ccf1-d2", "ovs_interfaceid": "6da8ccf1-d23a-4ebd-8053-78f80e526699", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 684.697833] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e8f1ffd-9247-4451-a114-94fc97348252 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.703472] env[68217]: DEBUG nova.network.neutron [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Successfully updated port: 686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 684.711735] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ca1b69-501c-4df1-837b-ef08c68df3f2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.744886] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beda42b0-1733-48db-a989-1e53b020a1ca {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.753199] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddea4b39-fcf7-4efa-9467-0e8ea0790d76 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.767887] env[68217]: DEBUG nova.compute.provider_tree [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 
tempest-ServerTagsTestJSON-526061362-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 684.974107] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960878, 'name': CreateVM_Task, 'duration_secs': 1.4251} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 684.974458] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 684.975285] env[68217]: DEBUG oslo_concurrency.lockutils [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 684.975586] env[68217]: DEBUG oslo_concurrency.lockutils [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 684.976126] env[68217]: DEBUG oslo_concurrency.lockutils [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 684.976641] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6118fe3-13a7-4b4b-af7a-7f7bf90804f9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 684.983654] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Waiting for the task: (returnval){ [ 684.983654] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527586bc-1d69-705f-732c-0d117f68ddcd" [ 684.983654] env[68217]: _type = "Task" [ 684.983654] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 684.994506] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527586bc-1d69-705f-732c-0d117f68ddcd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.013695] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2960880, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074379} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.014426] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 685.015030] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cedbda5-e779-4dcd-acfe-3b11deb16700 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.038880] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] 1675982e-0702-482b-9fe6-fd4eb9d83311/1675982e-0702-482b-9fe6-fd4eb9d83311.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 685.039993] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d8b03da-1976-4723-97eb-e33e240cbac5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.061118] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 685.061118] env[68217]: value = "task-2960881" [ 685.061118] env[68217]: _type = "Task" [ 685.061118] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.070677] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2960881, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.192662] env[68217]: DEBUG oslo_concurrency.lockutils [req-5e522436-ccba-4880-a8a0-1c8764c526a5 req-49f40e04-192b-4118-af7a-c3a743213365 service nova] Releasing lock "refresh_cache-a4a88f10-937a-4fa6-aa15-eb7f669e77d0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.192973] env[68217]: DEBUG nova.compute.manager [req-5e522436-ccba-4880-a8a0-1c8764c526a5 req-49f40e04-192b-4118-af7a-c3a743213365 service nova] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Received event network-vif-deleted-dbb82d0e-d13c-49a1-8f43-aabdcee3022d {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 685.207025] env[68217]: DEBUG oslo_concurrency.lockutils [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.207132] env[68217]: DEBUG oslo_concurrency.lockutils [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquired lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.207301] env[68217]: DEBUG nova.network.neutron [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 685.303574] env[68217]: DEBUG nova.scheduler.client.report [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 54 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 685.303856] env[68217]: DEBUG nova.compute.provider_tree [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 54 to 55 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 685.304054] env[68217]: DEBUG nova.compute.provider_tree [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 685.495566] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527586bc-1d69-705f-732c-0d117f68ddcd, 'name': SearchDatastore_Task, 'duration_secs': 0.028561} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.496284] env[68217]: DEBUG oslo_concurrency.lockutils [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 685.496523] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 685.496985] env[68217]: DEBUG oslo_concurrency.lockutils [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 685.497274] env[68217]: DEBUG oslo_concurrency.lockutils [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 685.497566] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 685.498010] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-15c9384b-c6df-46c4-ad83-faa7f79eab5f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.508488] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 685.508784] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 
tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 685.509603] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2576cdfa-fce2-42a7-a380-34b1a89fe722 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.516618] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Waiting for the task: (returnval){ [ 685.516618] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f628c7-6802-9751-0cf2-5bcf9c5dc7fd" [ 685.516618] env[68217]: _type = "Task" [ 685.516618] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.527167] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f628c7-6802-9751-0cf2-5bcf9c5dc7fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.570977] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2960881, 'name': ReconfigVM_Task, 'duration_secs': 0.31652} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 685.571293] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Reconfigured VM instance instance-0000001e to attach disk [datastore1] 1675982e-0702-482b-9fe6-fd4eb9d83311/1675982e-0702-482b-9fe6-fd4eb9d83311.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 685.571998] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d20e1970-522e-4750-8764-4ccd94647eff {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.580938] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 685.580938] env[68217]: value = "task-2960882" [ 685.580938] env[68217]: _type = "Task" [ 685.580938] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 685.591167] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2960882, 'name': Rename_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 685.741578] env[68217]: DEBUG nova.network.neutron [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 685.809355] env[68217]: DEBUG oslo_concurrency.lockutils [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.847s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.809891] env[68217]: DEBUG nova.compute.manager [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 685.812593] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 31.349s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.812824] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 685.812999] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68217) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 685.813387] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.830s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 685.814968] env[68217]: INFO nova.compute.claims [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 685.821851] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e25352e4-0d7e-43e5-8e79-7140704f2322 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.831799] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69112e5-27f0-407d-85c4-f4953edacda2 {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.848548] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff3d9516-9918-45b3-b1b9-6bd1a3102eab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.858848] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd34304b-c086-4b5c-934a-c422a956687f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 685.892460] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179221MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=68217) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 685.892785] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 685.947826] env[68217]: DEBUG nova.network.neutron [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Updating instance_info_cache with network_info: [{"id": "686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0", "address": "fa:16:3e:2c:18:00", "network": {"id": "2456ed77-d69f-4430-b649-cebfb2e6e5c6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-259900287-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "023b801c234d47d79cb57ea73058e81c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap686a0657-d9", "ovs_interfaceid": "686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 686.029327] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f628c7-6802-9751-0cf2-5bcf9c5dc7fd, 'name': SearchDatastore_Task, 'duration_secs': 0.012152} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.030372] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cfdbe6e-8676-468d-a091-ad566e8bfa4e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.036354] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Waiting for the task: (returnval){ [ 686.036354] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527853b5-8b01-f482-b60b-5bb8cf779dd0" [ 686.036354] env[68217]: _type = "Task" [ 686.036354] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.045181] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527853b5-8b01-f482-b60b-5bb8cf779dd0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.090826] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2960882, 'name': Rename_Task, 'duration_secs': 0.155354} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.091510] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 686.091798] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-516a16ac-a048-47f3-9cf0-193f82c08a04 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.099778] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 686.099778] env[68217]: value = "task-2960883" [ 686.099778] env[68217]: _type = "Task" [ 686.099778] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.109427] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2960883, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.323097] env[68217]: DEBUG nova.compute.utils [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 686.324577] env[68217]: DEBUG nova.compute.manager [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 686.324758] env[68217]: DEBUG nova.network.neutron [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 686.376792] env[68217]: DEBUG nova.policy [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84cecdf85a2e4faea7b50a40b15322c7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '65d555b32ffa4f649670d75b75eaafb5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 686.452209] env[68217]: DEBUG oslo_concurrency.lockutils [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Releasing lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.452644] env[68217]: DEBUG nova.compute.manager [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Instance network_info: |[{"id": "686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0", "address": "fa:16:3e:2c:18:00", "network": {"id": "2456ed77-d69f-4430-b649-cebfb2e6e5c6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-259900287-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "023b801c234d47d79cb57ea73058e81c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap686a0657-d9", "ovs_interfaceid": "686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0", "qbh_params": null, "qbg_params": 
null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 686.453211] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:18:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e0c77754-4085-434b-a3e8-d61be099ac67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 686.461400] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Creating folder: Project (023b801c234d47d79cb57ea73058e81c). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 686.461873] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8bb7239e-c5c1-447f-9999-1903560440ee {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.477426] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Created folder: Project (023b801c234d47d79cb57ea73058e81c) in parent group-v594094. [ 686.477773] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Creating folder: Instances. Parent ref: group-v594186. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 686.478076] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-701e1c80-bb9d-4b58-97e1-7b19de437221 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.490543] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Created folder: Instances in parent group-v594186. [ 686.494482] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 686.495027] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 686.495432] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-02387aaa-61c4-4cb9-9532-88d0760e22ab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.525175] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 686.525175] env[68217]: value = "task-2960886" [ 686.525175] env[68217]: _type = "Task" [ 686.525175] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.535594] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960886, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.547230] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527853b5-8b01-f482-b60b-5bb8cf779dd0, 'name': SearchDatastore_Task, 'duration_secs': 0.01755} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 686.547584] env[68217]: DEBUG oslo_concurrency.lockutils [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 686.547844] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] a4a88f10-937a-4fa6-aa15-eb7f669e77d0/a4a88f10-937a-4fa6-aa15-eb7f669e77d0.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 686.548154] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e07f2354-f495-4a15-afcc-d7b690a58469 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 686.557948] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Waiting for the task: (returnval){ [ 686.557948] env[68217]: value = "task-2960887" [ 686.557948] env[68217]: _type = "Task" [ 686.557948] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 686.567267] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': task-2960887, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.612383] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2960883, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 686.828089] env[68217]: DEBUG nova.compute.manager [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 686.836309] env[68217]: DEBUG nova.compute.manager [req-29456288-550a-46d2-a3ca-e85d591de399 req-9043515b-e5b4-4307-b7a3-b4a68d179cfa service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Received event network-changed-686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 686.836512] env[68217]: DEBUG nova.compute.manager [req-29456288-550a-46d2-a3ca-e85d591de399 req-9043515b-e5b4-4307-b7a3-b4a68d179cfa service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Refreshing instance network info cache due to event network-changed-686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 686.836941] env[68217]: DEBUG oslo_concurrency.lockutils [req-29456288-550a-46d2-a3ca-e85d591de399 req-9043515b-e5b4-4307-b7a3-b4a68d179cfa service nova] Acquiring lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 686.836941] env[68217]: DEBUG oslo_concurrency.lockutils [req-29456288-550a-46d2-a3ca-e85d591de399 req-9043515b-e5b4-4307-b7a3-b4a68d179cfa service nova] Acquired lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 686.837081] env[68217]: DEBUG nova.network.neutron [req-29456288-550a-46d2-a3ca-e85d591de399 req-9043515b-e5b4-4307-b7a3-b4a68d179cfa service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Refreshing network info cache for port 686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 687.017688] env[68217]: DEBUG nova.network.neutron [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Successfully created port: 31b89184-13c6-4151-8829-11780bcfc768 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 687.036855] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960886, 'name': CreateVM_Task, 'duration_secs': 0.469222} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.039972] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 687.041127] env[68217]: DEBUG oslo_concurrency.lockutils [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.041415] env[68217]: DEBUG oslo_concurrency.lockutils [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.041776] env[68217]: DEBUG oslo_concurrency.lockutils [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 687.042159] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42e910e0-f08c-4dcd-9700-234f474bfedc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.051043] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 687.051043] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52200141-e8c5-34bc-418f-1db443bc83db" [ 687.051043] env[68217]: _type = "Task" [ 687.051043] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.066293] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52200141-e8c5-34bc-418f-1db443bc83db, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.074051] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': task-2960887, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.115850] env[68217]: DEBUG oslo_vmware.api [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2960883, 'name': PowerOnVM_Task, 'duration_secs': 0.664444} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.116192] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 687.116496] env[68217]: INFO nova.compute.manager [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Took 8.38 seconds to spawn the instance on the hypervisor. [ 687.116763] env[68217]: DEBUG nova.compute.manager [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 687.117774] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91538a1b-5d94-4fbd-80e3-f83cfa59ca85 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.433196] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d653c2f5-144f-4b1a-bf59-1ccb204c43d4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.445454] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b5c149-b14e-4454-9750-83fbe7293d2f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.481965] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cddc46d8-2c5a-4b56-8836-dbd130a61834 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.493140] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d375b11-de0f-42ac-9d9c-16174e82061a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.512816] env[68217]: DEBUG nova.compute.provider_tree [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 687.562657] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52200141-e8c5-34bc-418f-1db443bc83db, 'name': SearchDatastore_Task, 'duration_secs': 0.022871} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.566103] env[68217]: DEBUG oslo_concurrency.lockutils [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 687.566450] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 687.566711] env[68217]: DEBUG oslo_concurrency.lockutils [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 687.566974] env[68217]: DEBUG oslo_concurrency.lockutils [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 687.567216] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 687.567642] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e4381cf-fc60-4b2e-b496-dceb7636f555 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.575711] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': task-2960887, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.595168} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 687.577155] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] a4a88f10-937a-4fa6-aa15-eb7f669e77d0/a4a88f10-937a-4fa6-aa15-eb7f669e77d0.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 687.577396] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 687.580989] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a6d54b15-6b42-4d6c-a3b7-8257c4c54b18 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.584597] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 687.585948] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 687.585948] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a119f23-44a1-4078-aa1d-d0b254588520 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.598262] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 687.598262] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ec2884-4302-a76c-bb03-a8d95d8f1a51" [ 687.598262] env[68217]: _type = "Task" [ 687.598262] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.602249] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Waiting for the task: (returnval){ [ 687.602249] env[68217]: value = "task-2960888" [ 687.602249] env[68217]: _type = "Task" [ 687.602249] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 687.610038] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ec2884-4302-a76c-bb03-a8d95d8f1a51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.615801] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': task-2960888, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 687.631447] env[68217]: DEBUG nova.network.neutron [req-29456288-550a-46d2-a3ca-e85d591de399 req-9043515b-e5b4-4307-b7a3-b4a68d179cfa service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Updated VIF entry in instance network info cache for port 686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 687.631810] env[68217]: DEBUG nova.network.neutron [req-29456288-550a-46d2-a3ca-e85d591de399 req-9043515b-e5b4-4307-b7a3-b4a68d179cfa service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Updating instance_info_cache with network_info: [{"id": "686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0", "address": "fa:16:3e:2c:18:00", "network": {"id": "2456ed77-d69f-4430-b649-cebfb2e6e5c6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-259900287-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "023b801c234d47d79cb57ea73058e81c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap686a0657-d9", "ovs_interfaceid": "686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 687.638999] env[68217]: INFO nova.compute.manager [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Took 51.06 seconds to build instance. [ 687.850222] env[68217]: DEBUG nova.compute.manager [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 687.875894] env[68217]: DEBUG nova.virt.hardware [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 687.876210] env[68217]: DEBUG nova.virt.hardware [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 687.876385] env[68217]: DEBUG nova.virt.hardware [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 687.876571] env[68217]: DEBUG nova.virt.hardware [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 687.876731] env[68217]: DEBUG nova.virt.hardware [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 687.876882] env[68217]: DEBUG nova.virt.hardware [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 687.877107] env[68217]: DEBUG nova.virt.hardware [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 687.877271] env[68217]: DEBUG nova.virt.hardware [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 687.877440] env[68217]: DEBUG nova.virt.hardware [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 
tempest-ServerTagsTestJSON-526061362-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 687.877601] env[68217]: DEBUG nova.virt.hardware [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 687.877771] env[68217]: DEBUG nova.virt.hardware [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 687.879027] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6d6f6a-2f35-4893-a64c-4e968c7275c2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 687.888030] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-659ffe8e-d5dc-4711-bf82-5ad3045216a6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.018669] env[68217]: DEBUG nova.scheduler.client.report [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 688.086901] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "1675982e-0702-482b-9fe6-fd4eb9d83311" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.110653] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ec2884-4302-a76c-bb03-a8d95d8f1a51, 'name': SearchDatastore_Task, 'duration_secs': 0.014681} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.112019] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-29c18e13-12ea-4bef-89c3-10450d3c0e02 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.117185] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': task-2960888, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075046} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.117729] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 688.118523] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89571adc-61e6-4ed5-b64c-b441f1a25e05 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.122108] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 688.122108] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e952f0-7d3a-6cd0-8d4d-c8312aeef95b" [ 688.122108] env[68217]: _type = "Task" [ 688.122108] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.145256] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Reconfiguring VM instance instance-0000001f to attach disk [datastore1] a4a88f10-937a-4fa6-aa15-eb7f669e77d0/a4a88f10-937a-4fa6-aa15-eb7f669e77d0.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 688.146288] env[68217]: DEBUG oslo_concurrency.lockutils [req-29456288-550a-46d2-a3ca-e85d591de399 req-9043515b-e5b4-4307-b7a3-b4a68d179cfa service nova] Releasing lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 688.146787] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79f40aa4-f670-4a2b-8862-870571d12b36 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "1675982e-0702-482b-9fe6-fd4eb9d83311" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.953s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.147014] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b7a575d4-a61c-4aea-9bee-121316aa52ba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.168056] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "1675982e-0702-482b-9fe6-fd4eb9d83311" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.081s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.168295] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "1675982e-0702-482b-9fe6-fd4eb9d83311-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 688.168603] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "1675982e-0702-482b-9fe6-fd4eb9d83311-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.168704] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "1675982e-0702-482b-9fe6-fd4eb9d83311-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.170471] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e952f0-7d3a-6cd0-8d4d-c8312aeef95b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.171550] env[68217]: INFO nova.compute.manager [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Terminating instance [ 688.179779] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Waiting for the task: (returnval){ [ 688.179779] env[68217]: value = "task-2960889" [ 688.179779] env[68217]: _type = "Task" [ 688.179779] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.190712] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': task-2960889, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.524601] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.711s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 688.525067] env[68217]: DEBUG nova.compute.manager [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 688.528166] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.611s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 688.529552] env[68217]: INFO nova.compute.claims [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 688.634477] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e952f0-7d3a-6cd0-8d4d-c8312aeef95b, 'name': SearchDatastore_Task, 'duration_secs': 0.040085} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.634788] env[68217]: DEBUG oslo_concurrency.lockutils [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 688.635434] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 58c15727-79ae-404f-a054-d71e3be498cc/58c15727-79ae-404f-a054-d71e3be498cc.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 688.635544] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e4580e28-d74f-4dc8-a210-b009119eb966 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.644754] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 688.644754] env[68217]: value = "task-2960890" [ 688.644754] env[68217]: _type = "Task" [ 688.644754] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.652748] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2960890, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.672357] env[68217]: DEBUG nova.compute.manager [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 688.675768] env[68217]: DEBUG nova.compute.manager [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 688.676049] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 688.676964] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efb1f22a-2963-4bbb-b769-4b179ab37d7a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.686730] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 688.687431] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e184dd0-0a59-497b-9161-734cebfdfbcd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.692561] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': task-2960889, 'name': ReconfigVM_Task, 'duration_secs': 0.321209} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 688.693195] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Reconfigured VM instance instance-0000001f to attach disk [datastore1] a4a88f10-937a-4fa6-aa15-eb7f669e77d0/a4a88f10-937a-4fa6-aa15-eb7f669e77d0.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 688.693517] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e29bd238-d2a6-417d-829e-2577113d62b0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 688.697328] env[68217]: DEBUG oslo_vmware.api [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 688.697328] env[68217]: value = "task-2960891" [ 688.697328] env[68217]: _type = "Task" [ 688.697328] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.707388] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Waiting for the task: (returnval){ [ 688.707388] env[68217]: value = "task-2960892" [ 688.707388] env[68217]: _type = "Task" [ 688.707388] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 688.717134] env[68217]: DEBUG oslo_vmware.api [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2960891, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.724538] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': task-2960892, 'name': Rename_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 688.908984] env[68217]: DEBUG nova.network.neutron [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Successfully updated port: 31b89184-13c6-4151-8829-11780bcfc768 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 689.019711] env[68217]: DEBUG nova.compute.manager [req-9d512834-d2d8-4e07-adb8-259dc8c9e77d req-31d49cdf-a751-4b3b-abc5-f6aeca16608a service nova] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Received event network-vif-plugged-31b89184-13c6-4151-8829-11780bcfc768 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 689.020217] env[68217]: DEBUG oslo_concurrency.lockutils [req-9d512834-d2d8-4e07-adb8-259dc8c9e77d req-31d49cdf-a751-4b3b-abc5-f6aeca16608a service nova] Acquiring lock "dc45d268-7a7f-4e65-b6fa-942ddba69b03-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.020217] env[68217]: DEBUG oslo_concurrency.lockutils [req-9d512834-d2d8-4e07-adb8-259dc8c9e77d req-31d49cdf-a751-4b3b-abc5-f6aeca16608a service nova] Lock "dc45d268-7a7f-4e65-b6fa-942ddba69b03-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 689.020378] env[68217]: DEBUG oslo_concurrency.lockutils [req-9d512834-d2d8-4e07-adb8-259dc8c9e77d req-31d49cdf-a751-4b3b-abc5-f6aeca16608a service nova] Lock "dc45d268-7a7f-4e65-b6fa-942ddba69b03-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 689.020547] env[68217]: DEBUG nova.compute.manager [req-9d512834-d2d8-4e07-adb8-259dc8c9e77d req-31d49cdf-a751-4b3b-abc5-f6aeca16608a service nova] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] No waiting events found dispatching network-vif-plugged-31b89184-13c6-4151-8829-11780bcfc768 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 689.020707] env[68217]: WARNING nova.compute.manager [req-9d512834-d2d8-4e07-adb8-259dc8c9e77d req-31d49cdf-a751-4b3b-abc5-f6aeca16608a service nova] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Received unexpected event network-vif-plugged-31b89184-13c6-4151-8829-11780bcfc768 for instance with vm_state building and task_state spawning. [ 689.034923] env[68217]: DEBUG nova.compute.utils [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 689.041016] env[68217]: DEBUG nova.compute.manager [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 689.041297] env[68217]: DEBUG nova.network.neutron [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 689.114382] env[68217]: DEBUG nova.policy [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98519c6a9a164db39df83142383e97aa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b99296b92df248d684d9e224d27bdcbc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 689.159918] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2960890, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.211295] env[68217]: DEBUG oslo_vmware.api [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2960891, 'name': PowerOffVM_Task, 'duration_secs': 0.230474} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.212698] env[68217]: DEBUG oslo_concurrency.lockutils [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 689.216573] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 689.216776] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 689.217120] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-30b4db36-f17c-42dc-91b3-7097bbce5759 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.228083] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': task-2960892, 'name': Rename_Task, 'duration_secs': 0.169824} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.228404] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 689.228697] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9de12f5d-628d-4cb9-8ab5-88a555cd2930 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.237591] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Waiting for the task: (returnval){ [ 689.237591] env[68217]: value = "task-2960894" [ 689.237591] env[68217]: _type = "Task" [ 689.237591] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.248080] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': task-2960894, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.312246] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 689.312246] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 689.312246] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleting the datastore file [datastore1] 1675982e-0702-482b-9fe6-fd4eb9d83311 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 689.312435] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2d8ed1b6-ac14-4df2-b9c0-219f98443893 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.320958] env[68217]: DEBUG oslo_vmware.api [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 689.320958] env[68217]: value = "task-2960895" [ 689.320958] env[68217]: _type = "Task" [ 689.320958] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.330968] env[68217]: DEBUG oslo_vmware.api [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2960895, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.412946] env[68217]: DEBUG oslo_concurrency.lockutils [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Acquiring lock "refresh_cache-dc45d268-7a7f-4e65-b6fa-942ddba69b03" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 689.413142] env[68217]: DEBUG oslo_concurrency.lockutils [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Acquired lock "refresh_cache-dc45d268-7a7f-4e65-b6fa-942ddba69b03" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 689.413312] env[68217]: DEBUG nova.network.neutron [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 689.542086] env[68217]: DEBUG nova.compute.manager [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 689.664184] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2960890, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.607609} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.665684] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 58c15727-79ae-404f-a054-d71e3be498cc/58c15727-79ae-404f-a054-d71e3be498cc.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 689.665684] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 689.665684] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c71d5fd9-855b-425c-84a9-2364fe3759d4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 689.674632] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 689.674632] env[68217]: value = "task-2960896" [ 689.674632] env[68217]: _type = "Task" [ 689.674632] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 689.684417] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2960896, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.688800] env[68217]: DEBUG nova.network.neutron [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Successfully created port: 2077ec71-7159-4678-b110-039046651648 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 689.762340] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': task-2960894, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 689.832562] env[68217]: DEBUG oslo_vmware.api [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2960895, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.221579} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 689.835678] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 689.835880] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 689.836075] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 689.836270] env[68217]: INFO nova.compute.manager [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Took 1.16 seconds to destroy the instance on the hypervisor. [ 689.836532] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 689.836973] env[68217]: DEBUG nova.compute.manager [-] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 689.837119] env[68217]: DEBUG nova.network.neutron [-] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 689.973966] env[68217]: DEBUG nova.network.neutron [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 690.107100] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b43935c3-1490-4746-9085-8e668cdccce4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.117353] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb048c6-85db-4728-9974-bbfbb35a708e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.156579] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5579c562-e804-4fd2-bbbe-073ffaf9ac6f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.166216] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df0ac80-cd42-4f64-a8a4-b1a30a75c037 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.181637] env[68217]: DEBUG nova.compute.provider_tree [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 690.194325] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2960896, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.117741} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.194632] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 690.195432] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d451ec20-cc12-4071-8c3a-88e13981e40d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.220295] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] 58c15727-79ae-404f-a054-d71e3be498cc/58c15727-79ae-404f-a054-d71e3be498cc.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 690.221407] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f074d875-8531-4cef-8c02-48945c93d18a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.242975] env[68217]: DEBUG nova.compute.manager [req-f3d393d8-f25f-4c98-8522-b9496dfbc5dd req-ab8207c3-8e7e-46f9-aff6-52eda35ae80a service nova] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Received event network-vif-deleted-105398c1-34ae-4691-9fb2-8cf58f067ec3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 690.243913] env[68217]: INFO nova.compute.manager [req-f3d393d8-f25f-4c98-8522-b9496dfbc5dd req-ab8207c3-8e7e-46f9-aff6-52eda35ae80a service nova] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Neutron deleted interface 105398c1-34ae-4691-9fb2-8cf58f067ec3; detaching it from the instance and deleting it from the info cache [ 690.244321] env[68217]: DEBUG nova.network.neutron [req-f3d393d8-f25f-4c98-8522-b9496dfbc5dd req-ab8207c3-8e7e-46f9-aff6-52eda35ae80a service nova] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.258327] env[68217]: DEBUG oslo_vmware.api [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': task-2960894, 'name': PowerOnVM_Task, 'duration_secs': 0.634761} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.260560] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 690.260866] env[68217]: INFO nova.compute.manager [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Took 8.83 seconds to spawn the instance on the hypervisor. [ 690.263028] env[68217]: DEBUG nova.compute.manager [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 690.263028] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 690.263028] env[68217]: value = "task-2960897" [ 690.263028] env[68217]: _type = "Task" [ 690.263028] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.263028] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495a36d4-cfb7-4f68-b53b-92c0f03f3ab5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.282024] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2960897, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.331536] env[68217]: DEBUG nova.network.neutron [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Updating instance_info_cache with network_info: [{"id": "31b89184-13c6-4151-8829-11780bcfc768", "address": "fa:16:3e:70:a0:1c", "network": {"id": "cb1edef3-c8ed-4e77-b070-b25e6eb7b6bf", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1481238888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65d555b32ffa4f649670d75b75eaafb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31b89184-13", "ovs_interfaceid": "31b89184-13c6-4151-8829-11780bcfc768", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.559302] env[68217]: DEBUG nova.compute.manager [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 690.587632] env[68217]: DEBUG nova.virt.hardware [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 690.587862] env[68217]: DEBUG nova.virt.hardware [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 690.588179] env[68217]: DEBUG nova.virt.hardware [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 690.588288] env[68217]: DEBUG nova.virt.hardware [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 690.588435] env[68217]: DEBUG nova.virt.hardware [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 690.588514] env[68217]: DEBUG nova.virt.hardware [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 690.588671] env[68217]: DEBUG nova.virt.hardware [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 690.588831] env[68217]: DEBUG nova.virt.hardware [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 690.588995] env[68217]: DEBUG nova.virt.hardware [None 
req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 690.589179] env[68217]: DEBUG nova.virt.hardware [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 690.589344] env[68217]: DEBUG nova.virt.hardware [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 690.590279] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90fabcc4-9b4e-4d98-b37d-e0fe86d7242a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.599276] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d62dd95-4d78-4b23-8031-456cd0afd27d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.617491] env[68217]: DEBUG nova.network.neutron [-] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 690.688856] env[68217]: DEBUG nova.scheduler.client.report [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 690.751246] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9185772d-9837-4672-9efa-43405046891c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.761862] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d84b134-1dea-4be3-a9ac-2977bf4aac68 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.785196] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2960897, 'name': ReconfigVM_Task, 'duration_secs': 0.477854} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 690.785477] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Reconfigured VM instance instance-00000020 to attach disk [datastore2] 58c15727-79ae-404f-a054-d71e3be498cc/58c15727-79ae-404f-a054-d71e3be498cc.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 690.787610] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f8acdbd3-5508-4fa2-a61b-8d22fa017a05 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.801360] env[68217]: DEBUG nova.compute.manager [req-f3d393d8-f25f-4c98-8522-b9496dfbc5dd req-ab8207c3-8e7e-46f9-aff6-52eda35ae80a service nova] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Detach interface failed, port_id=105398c1-34ae-4691-9fb2-8cf58f067ec3, reason: Instance 1675982e-0702-482b-9fe6-fd4eb9d83311 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 690.802243] env[68217]: INFO nova.compute.manager [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Took 45.07 seconds to build instance. [ 690.810530] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 690.810530] env[68217]: value = "task-2960898" [ 690.810530] env[68217]: _type = "Task" [ 690.810530] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.824823] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2960898, 'name': Rename_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 690.833860] env[68217]: DEBUG oslo_concurrency.lockutils [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Releasing lock "refresh_cache-dc45d268-7a7f-4e65-b6fa-942ddba69b03" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 690.834244] env[68217]: DEBUG nova.compute.manager [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Instance network_info: |[{"id": "31b89184-13c6-4151-8829-11780bcfc768", "address": "fa:16:3e:70:a0:1c", "network": {"id": "cb1edef3-c8ed-4e77-b070-b25e6eb7b6bf", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1481238888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65d555b32ffa4f649670d75b75eaafb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31b89184-13", "ovs_interfaceid": "31b89184-13c6-4151-8829-11780bcfc768", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 690.834969] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:70:a0:1c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '54c45719-5690-47bf-b45b-6cad9813071e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '31b89184-13c6-4151-8829-11780bcfc768', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 690.843016] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Creating folder: Project (65d555b32ffa4f649670d75b75eaafb5). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 690.843316] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-94030b09-fa3b-4205-9dae-a9b8eeded25f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.856594] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Created folder: Project (65d555b32ffa4f649670d75b75eaafb5) in parent group-v594094. 
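Throughout this run the driver repeats the same pattern: it invokes a vCenter method through the oslo.vmware session (the "Invoking <Object>.<Method>_Task with opID=oslo.vmware-..." lines), then blocks in wait_for_task, which produces the "Task: {...} progress is N%" and "completed successfully" entries. A minimal sketch of that pattern against oslo.vmware's public API; the host, credentials and managed-object id below are placeholders, not values from this deployment:

```python
# Sketch of the invoke-then-poll pattern behind the "Invoking
# <Object>.<Method>_Task" and "Task: {...} progress is N%" lines above.
# Host, credentials and the VM managed-object id are placeholders.
from oslo_vmware import api, vim_util

session = api.VMwareAPISession(
    'vc.example.test', 'administrator', 'secret',
    api_retry_count=10, task_poll_interval=0.5)

# Build a managed-object reference for an existing VM (placeholder id).
vm_ref = vim_util.get_moref('vm-123', 'VirtualMachine')

# Start the asynchronous vCenter task, then block while the session polls
# its TaskInfo until it reaches the 'success' (or 'error') state.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
task_info = session.wait_for_task(task)
print(task_info.state)   # 'success' once the power-off completes
```

The per-call opID values seen in the log (oslo.vmware-<uuid>) are generated by the session for each request, which is what lets a single task be correlated across the "Invoking", "Waiting for the task" and "completed successfully" records.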
[ 690.856813] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Creating folder: Instances. Parent ref: group-v594189. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 690.857080] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-93715d2c-e5f5-4499-8ff8-e1462ba30500 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.868121] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Created folder: Instances in parent group-v594189. [ 690.868646] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 690.868747] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 690.868945] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b464d3c-0e45-4fc4-9279-d49215965b0f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 690.891729] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 690.891729] env[68217]: value = "task-2960901" [ 690.891729] env[68217]: _type = "Task" [ 690.891729] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 690.901114] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960901, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.049844] env[68217]: DEBUG nova.compute.manager [req-e34700ff-a21a-4010-bb93-9ba2c0859563 req-fed921ec-9587-4dc9-a8d8-49a750849db2 service nova] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Received event network-changed-31b89184-13c6-4151-8829-11780bcfc768 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 691.050674] env[68217]: DEBUG nova.compute.manager [req-e34700ff-a21a-4010-bb93-9ba2c0859563 req-fed921ec-9587-4dc9-a8d8-49a750849db2 service nova] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Refreshing instance network info cache due to event network-changed-31b89184-13c6-4151-8829-11780bcfc768. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 691.051131] env[68217]: DEBUG oslo_concurrency.lockutils [req-e34700ff-a21a-4010-bb93-9ba2c0859563 req-fed921ec-9587-4dc9-a8d8-49a750849db2 service nova] Acquiring lock "refresh_cache-dc45d268-7a7f-4e65-b6fa-942ddba69b03" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.051436] env[68217]: DEBUG oslo_concurrency.lockutils [req-e34700ff-a21a-4010-bb93-9ba2c0859563 req-fed921ec-9587-4dc9-a8d8-49a750849db2 service nova] Acquired lock "refresh_cache-dc45d268-7a7f-4e65-b6fa-942ddba69b03" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 691.051663] env[68217]: DEBUG nova.network.neutron [req-e34700ff-a21a-4010-bb93-9ba2c0859563 req-fed921ec-9587-4dc9-a8d8-49a750849db2 service nova] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Refreshing network info cache for port 31b89184-13c6-4151-8829-11780bcfc768 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 691.119765] env[68217]: INFO nova.compute.manager [-] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Took 1.28 seconds to deallocate network for instance. [ 691.196626] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.668s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.197438] env[68217]: DEBUG nova.compute.manager [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 691.200305] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 31.263s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.200503] env[68217]: DEBUG nova.objects.instance [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68217) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 691.304801] env[68217]: DEBUG oslo_concurrency.lockutils [None req-832eb739-4d0e-4f5f-81c9-a2bb7a5a44a2 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Lock "a4a88f10-937a-4fa6-aa15-eb7f669e77d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.471s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 691.321928] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2960898, 'name': Rename_Task, 'duration_secs': 0.216215} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.322940] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 691.323282] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-25a7fca8-842e-47ce-9fa5-7235b3310670 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.332452] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 691.332452] env[68217]: value = "task-2960902" [ 691.332452] env[68217]: _type = "Task" [ 691.332452] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.342306] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2960902, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.421253] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960901, 'name': CreateVM_Task, 'duration_secs': 0.391097} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.421476] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 691.422367] env[68217]: DEBUG oslo_concurrency.lockutils [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.422544] env[68217]: DEBUG oslo_concurrency.lockutils [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 691.422842] env[68217]: DEBUG oslo_concurrency.lockutils [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 691.423120] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-40c265e8-8d06-49c0-a08d-22ccc23536dc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.429833] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Waiting for the task: (returnval){ [ 691.429833] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e9537a-0d03-0bd9-41df-1d6080911507" [ 691.429833] env[68217]: _type = "Task" [ 691.429833] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.440199] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e9537a-0d03-0bd9-41df-1d6080911507, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.625980] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.707487] env[68217]: DEBUG nova.compute.utils [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 691.707487] env[68217]: DEBUG nova.compute.manager [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 691.707487] env[68217]: DEBUG nova.network.neutron [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 691.750251] env[68217]: DEBUG nova.policy [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '02581f5c907241928f4f241b4c03a743', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a6cd1165637a44528a61171aef40a553', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 691.809747] env[68217]: DEBUG nova.compute.manager [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 691.848127] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2960902, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 691.860174] env[68217]: DEBUG oslo_concurrency.lockutils [None req-61de7b0d-666a-4563-817f-2c3ef3a21b31 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Acquiring lock "a4a88f10-937a-4fa6-aa15-eb7f669e77d0" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 691.860435] env[68217]: DEBUG oslo_concurrency.lockutils [None req-61de7b0d-666a-4563-817f-2c3ef3a21b31 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Lock "a4a88f10-937a-4fa6-aa15-eb7f669e77d0" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 691.860742] env[68217]: INFO nova.compute.manager [None req-61de7b0d-666a-4563-817f-2c3ef3a21b31 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Rebooting instance [ 691.943339] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e9537a-0d03-0bd9-41df-1d6080911507, 'name': SearchDatastore_Task, 'duration_secs': 0.029711} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 691.943779] env[68217]: DEBUG oslo_concurrency.lockutils [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 691.944223] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 691.944470] env[68217]: DEBUG oslo_concurrency.lockutils [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 691.944616] env[68217]: DEBUG oslo_concurrency.lockutils [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 691.945040] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 
tempest-ServerTagsTestJSON-526061362-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 691.945356] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3dac1c74-20b8-4dd3-9dc5-07edcd1615fb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.956183] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 691.956383] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 691.957190] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-663e14d1-0107-4d95-a462-166feff9b6ae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.965697] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Waiting for the task: (returnval){ [ 691.965697] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c3ef62-bd33-2475-0ddc-86ce31868ed1" [ 691.965697] env[68217]: _type = "Task" [ 691.965697] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 691.975393] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c3ef62-bd33-2475-0ddc-86ce31868ed1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.091113] env[68217]: DEBUG nova.network.neutron [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Successfully created port: e1840648-4f87-4974-bc63-bd2b25acab29 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 692.185312] env[68217]: DEBUG nova.network.neutron [req-e34700ff-a21a-4010-bb93-9ba2c0859563 req-fed921ec-9587-4dc9-a8d8-49a750849db2 service nova] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Updated VIF entry in instance network info cache for port 31b89184-13c6-4151-8829-11780bcfc768. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 692.185745] env[68217]: DEBUG nova.network.neutron [req-e34700ff-a21a-4010-bb93-9ba2c0859563 req-fed921ec-9587-4dc9-a8d8-49a750849db2 service nova] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Updating instance_info_cache with network_info: [{"id": "31b89184-13c6-4151-8829-11780bcfc768", "address": "fa:16:3e:70:a0:1c", "network": {"id": "cb1edef3-c8ed-4e77-b070-b25e6eb7b6bf", "bridge": "br-int", "label": "tempest-ServerTagsTestJSON-1481238888-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "65d555b32ffa4f649670d75b75eaafb5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "54c45719-5690-47bf-b45b-6cad9813071e", "external-id": "nsx-vlan-transportzone-62", "segmentation_id": 62, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap31b89184-13", "ovs_interfaceid": "31b89184-13c6-4151-8829-11780bcfc768", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 692.213518] env[68217]: DEBUG nova.compute.manager [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 692.216985] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b4592444-2d03-4183-94e1-a44182f0e0dc tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.017s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 692.218355] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.596s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 692.218578] env[68217]: DEBUG nova.objects.instance [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lazy-loading 'resources' on Instance uuid fcddfd72-a130-4efc-82cb-1fb22d33d684 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 692.231201] env[68217]: DEBUG nova.network.neutron [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Successfully updated port: 2077ec71-7159-4678-b110-039046651648 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 692.345070] env[68217]: DEBUG oslo_vmware.api [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2960902, 'name': PowerOnVM_Task, 'duration_secs': 0.634083} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.346101] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 692.346406] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 692.347126] env[68217]: INFO nova.compute.manager [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Took 8.35 seconds to spawn the instance on the hypervisor. 
[ 692.347126] env[68217]: DEBUG nova.compute.manager [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 692.348344] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd8bda4-51bd-42e3-a6da-b8ac252300ed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.398463] env[68217]: DEBUG oslo_concurrency.lockutils [None req-61de7b0d-666a-4563-817f-2c3ef3a21b31 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Acquiring lock "refresh_cache-a4a88f10-937a-4fa6-aa15-eb7f669e77d0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.398463] env[68217]: DEBUG oslo_concurrency.lockutils [None req-61de7b0d-666a-4563-817f-2c3ef3a21b31 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Acquired lock "refresh_cache-a4a88f10-937a-4fa6-aa15-eb7f669e77d0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.398463] env[68217]: DEBUG nova.network.neutron [None req-61de7b0d-666a-4563-817f-2c3ef3a21b31 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 692.476623] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c3ef62-bd33-2475-0ddc-86ce31868ed1, 'name': SearchDatastore_Task, 'duration_secs': 0.022256} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 692.477522] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-193b3f5a-11f2-4f78-9008-6807f4252ba3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.483903] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Waiting for the task: (returnval){ [ 692.483903] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5269088c-2ef4-dbf6-e5c3-a208e72ff8ec" [ 692.483903] env[68217]: _type = "Task" [ 692.483903] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 692.493661] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5269088c-2ef4-dbf6-e5c3-a208e72ff8ec, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 692.689304] env[68217]: DEBUG oslo_concurrency.lockutils [req-e34700ff-a21a-4010-bb93-9ba2c0859563 req-fed921ec-9587-4dc9-a8d8-49a750849db2 service nova] Releasing lock "refresh_cache-dc45d268-7a7f-4e65-b6fa-942ddba69b03" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 692.731357] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquiring lock "refresh_cache-b5e15801-301a-4ee6-87d2-bbf749967631" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 692.731713] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquired lock "refresh_cache-b5e15801-301a-4ee6-87d2-bbf749967631" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 692.731713] env[68217]: DEBUG nova.network.neutron [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 692.871551] env[68217]: INFO nova.compute.manager [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Took 45.32 seconds to build instance. [ 693.000339] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5269088c-2ef4-dbf6-e5c3-a208e72ff8ec, 'name': SearchDatastore_Task, 'duration_secs': 0.014171} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 693.000339] env[68217]: DEBUG oslo_concurrency.lockutils [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.000727] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] dc45d268-7a7f-4e65-b6fa-942ddba69b03/dc45d268-7a7f-4e65-b6fa-942ddba69b03.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 693.001032] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d441734f-a721-4e18-8ffb-b250b59507cb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.014102] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Waiting for the task: (returnval){ [ 693.014102] env[68217]: value = "task-2960903" [ 693.014102] env[68217]: _type = "Task" [ 693.014102] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 693.023198] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': task-2960903, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.099179] env[68217]: DEBUG nova.compute.manager [req-41ba3a5e-d306-4853-81bc-e18c3aa7c5ea req-ae53ffe5-140e-401c-b61d-51caae0c4ee8 service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Received event network-vif-plugged-2077ec71-7159-4678-b110-039046651648 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 693.099421] env[68217]: DEBUG oslo_concurrency.lockutils [req-41ba3a5e-d306-4853-81bc-e18c3aa7c5ea req-ae53ffe5-140e-401c-b61d-51caae0c4ee8 service nova] Acquiring lock "b5e15801-301a-4ee6-87d2-bbf749967631-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.099627] env[68217]: DEBUG oslo_concurrency.lockutils [req-41ba3a5e-d306-4853-81bc-e18c3aa7c5ea req-ae53ffe5-140e-401c-b61d-51caae0c4ee8 service nova] Lock "b5e15801-301a-4ee6-87d2-bbf749967631-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.099796] env[68217]: DEBUG oslo_concurrency.lockutils [req-41ba3a5e-d306-4853-81bc-e18c3aa7c5ea req-ae53ffe5-140e-401c-b61d-51caae0c4ee8 service nova] Lock "b5e15801-301a-4ee6-87d2-bbf749967631-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 693.099998] env[68217]: DEBUG nova.compute.manager [req-41ba3a5e-d306-4853-81bc-e18c3aa7c5ea req-ae53ffe5-140e-401c-b61d-51caae0c4ee8 service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] No waiting events found dispatching network-vif-plugged-2077ec71-7159-4678-b110-039046651648 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 693.100502] env[68217]: WARNING nova.compute.manager [req-41ba3a5e-d306-4853-81bc-e18c3aa7c5ea req-ae53ffe5-140e-401c-b61d-51caae0c4ee8 service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Received unexpected event network-vif-plugged-2077ec71-7159-4678-b110-039046651648 for instance with vm_state building and task_state spawning. [ 693.100677] env[68217]: DEBUG nova.compute.manager [req-41ba3a5e-d306-4853-81bc-e18c3aa7c5ea req-ae53ffe5-140e-401c-b61d-51caae0c4ee8 service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Received event network-changed-2077ec71-7159-4678-b110-039046651648 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 693.100861] env[68217]: DEBUG nova.compute.manager [req-41ba3a5e-d306-4853-81bc-e18c3aa7c5ea req-ae53ffe5-140e-401c-b61d-51caae0c4ee8 service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Refreshing instance network info cache due to event network-changed-2077ec71-7159-4678-b110-039046651648. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 693.101100] env[68217]: DEBUG oslo_concurrency.lockutils [req-41ba3a5e-d306-4853-81bc-e18c3aa7c5ea req-ae53ffe5-140e-401c-b61d-51caae0c4ee8 service nova] Acquiring lock "refresh_cache-b5e15801-301a-4ee6-87d2-bbf749967631" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 693.230283] env[68217]: DEBUG nova.compute.manager [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 693.263399] env[68217]: DEBUG nova.virt.hardware [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 693.263888] env[68217]: DEBUG nova.virt.hardware [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 693.263888] env[68217]: DEBUG nova.virt.hardware [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 693.264018] env[68217]: DEBUG nova.virt.hardware [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 693.264248] env[68217]: DEBUG nova.virt.hardware [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 693.264293] env[68217]: DEBUG nova.virt.hardware [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:434}} [ 693.264480] env[68217]: DEBUG nova.virt.hardware [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 693.264624] env[68217]: DEBUG nova.virt.hardware [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 693.264790] env[68217]: DEBUG nova.virt.hardware [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 693.264948] env[68217]: DEBUG nova.virt.hardware [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 693.265134] env[68217]: DEBUG nova.virt.hardware [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 693.266065] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dbd2cfe-62a4-42c9-91c3-976f12ee2e05 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.269771] env[68217]: DEBUG nova.network.neutron [None req-61de7b0d-666a-4563-817f-2c3ef3a21b31 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Updating instance_info_cache with network_info: [{"id": "6da8ccf1-d23a-4ebd-8053-78f80e526699", "address": "fa:16:3e:80:24:d0", "network": {"id": "45d10375-0689-4266-8175-4c6cac851356", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-2078150535-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2a12aa1bfa73469096139bdc472689ef", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "271fe7a0-dfd7-409b-920a-cf83ef1a86a3", "external-id": "nsx-vlan-transportzone-728", "segmentation_id": 728, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6da8ccf1-d2", "ovs_interfaceid": "6da8ccf1-d23a-4ebd-8053-78f80e526699", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.274406] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c428cfa4-c92b-4f76-808b-93d9e52e1e4a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.279389] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ccfc1e-59f1-4a67-8002-7820f8b9944d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.287057] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d960a3-d4c3-4616-b21a-8a1ed2865905 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.299322] env[68217]: DEBUG nova.network.neutron [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 693.333418] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-453060cb-4e20-4d74-b4fb-5fd42eebfb73 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.342225] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ee07217-dad5-4a9e-b492-78fe0ca56fa7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 693.357759] env[68217]: DEBUG nova.compute.provider_tree [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 693.378970] env[68217]: DEBUG oslo_concurrency.lockutils [None req-29c1d9a5-5429-4489-9e70-e030f67e4e54 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "58c15727-79ae-404f-a054-d71e3be498cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.043s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 693.531107] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': task-2960903, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 693.559221] env[68217]: DEBUG nova.network.neutron [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Updating instance_info_cache with network_info: [{"id": "2077ec71-7159-4678-b110-039046651648", "address": "fa:16:3e:d6:50:3d", "network": {"id": "b3eaa102-7723-4193-97a7-84c3b6d87de4", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1747363513-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b99296b92df248d684d9e224d27bdcbc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2077ec71-71", "ovs_interfaceid": "2077ec71-7159-4678-b110-039046651648", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 693.704994] env[68217]: DEBUG nova.network.neutron [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Successfully updated port: e1840648-4f87-4974-bc63-bd2b25acab29 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 693.777939] env[68217]: DEBUG oslo_concurrency.lockutils [None req-61de7b0d-666a-4563-817f-2c3ef3a21b31 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Releasing lock "refresh_cache-a4a88f10-937a-4fa6-aa15-eb7f669e77d0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 693.809538] env[68217]: DEBUG nova.compute.manager [req-33f2493c-0990-42d4-be13-f2a2c05fce9d req-f6f189ff-2bff-4e87-a9b0-e4b83baadf45 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Received event network-vif-plugged-e1840648-4f87-4974-bc63-bd2b25acab29 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 693.809665] env[68217]: DEBUG oslo_concurrency.lockutils [req-33f2493c-0990-42d4-be13-f2a2c05fce9d req-f6f189ff-2bff-4e87-a9b0-e4b83baadf45 service nova] Acquiring lock "e089c20e-b788-4e6c-9bd2-9ad485305582-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 693.809787] env[68217]: DEBUG oslo_concurrency.lockutils [req-33f2493c-0990-42d4-be13-f2a2c05fce9d req-f6f189ff-2bff-4e87-a9b0-e4b83baadf45 service nova] Lock "e089c20e-b788-4e6c-9bd2-9ad485305582-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 693.810089] env[68217]: DEBUG oslo_concurrency.lockutils [req-33f2493c-0990-42d4-be13-f2a2c05fce9d req-f6f189ff-2bff-4e87-a9b0-e4b83baadf45 service nova] Lock "e089c20e-b788-4e6c-9bd2-9ad485305582-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 693.810275] env[68217]: DEBUG nova.compute.manager [req-33f2493c-0990-42d4-be13-f2a2c05fce9d req-f6f189ff-2bff-4e87-a9b0-e4b83baadf45 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] No waiting events found dispatching network-vif-plugged-e1840648-4f87-4974-bc63-bd2b25acab29 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 693.810445] env[68217]: WARNING nova.compute.manager [req-33f2493c-0990-42d4-be13-f2a2c05fce9d req-f6f189ff-2bff-4e87-a9b0-e4b83baadf45 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Received unexpected event network-vif-plugged-e1840648-4f87-4974-bc63-bd2b25acab29 for instance with vm_state building and task_state spawning. [ 693.863019] env[68217]: DEBUG nova.scheduler.client.report [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 693.882172] env[68217]: DEBUG nova.compute.manager [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 694.025009] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': task-2960903, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.902922} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.025301] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] dc45d268-7a7f-4e65-b6fa-942ddba69b03/dc45d268-7a7f-4e65-b6fa-942ddba69b03.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 694.025508] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 694.025760] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0a66e9f2-5f14-4767-8fd6-a7c569875b13 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.033762] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Waiting for the task: (returnval){ [ 694.033762] env[68217]: value = "task-2960904" [ 694.033762] env[68217]: _type = "Task" [ 694.033762] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.044390] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': task-2960904, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.063839] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Releasing lock "refresh_cache-b5e15801-301a-4ee6-87d2-bbf749967631" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 694.064256] env[68217]: DEBUG nova.compute.manager [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Instance network_info: |[{"id": "2077ec71-7159-4678-b110-039046651648", "address": "fa:16:3e:d6:50:3d", "network": {"id": "b3eaa102-7723-4193-97a7-84c3b6d87de4", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1747363513-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b99296b92df248d684d9e224d27bdcbc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2077ec71-71", "ovs_interfaceid": "2077ec71-7159-4678-b110-039046651648", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 694.064593] env[68217]: DEBUG oslo_concurrency.lockutils [req-41ba3a5e-d306-4853-81bc-e18c3aa7c5ea req-ae53ffe5-140e-401c-b61d-51caae0c4ee8 service nova] Acquired lock "refresh_cache-b5e15801-301a-4ee6-87d2-bbf749967631" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.064837] env[68217]: DEBUG nova.network.neutron [req-41ba3a5e-d306-4853-81bc-e18c3aa7c5ea req-ae53ffe5-140e-401c-b61d-51caae0c4ee8 service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Refreshing network info cache for port 2077ec71-7159-4678-b110-039046651648 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 694.066107] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d6:50:3d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c330dbdb-ad20-4e7e-8a12-66e4a914a84a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2077ec71-7159-4678-b110-039046651648', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 694.073937] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 
tempest-SecurityGroupsTestJSON-1197000054-project-member] Creating folder: Project (b99296b92df248d684d9e224d27bdcbc). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 694.075432] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-57a6c9b5-fbbf-46ab-8032-12609d6a67f6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.089708] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Created folder: Project (b99296b92df248d684d9e224d27bdcbc) in parent group-v594094. [ 694.090570] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Creating folder: Instances. Parent ref: group-v594192. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 694.090570] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2fb8e34b-9cf4-4b0d-a9c1-feef29657525 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.105257] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Created folder: Instances in parent group-v594192. [ 694.105257] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 694.105257] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 694.105257] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe6b6e94-a12c-4591-8d09-e4a2816600d7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.125938] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 694.125938] env[68217]: value = "task-2960907" [ 694.125938] env[68217]: _type = "Task" [ 694.125938] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.134721] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960907, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.209334] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Acquiring lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 694.209334] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Acquired lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 694.209334] env[68217]: DEBUG nova.network.neutron [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 694.279461] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "156ea1ad-6e52-4848-915d-7ba74c606e6e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.279934] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "156ea1ad-6e52-4848-915d-7ba74c606e6e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.284171] env[68217]: DEBUG nova.compute.manager [None req-61de7b0d-666a-4563-817f-2c3ef3a21b31 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 694.285583] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85461bba-f191-4c1f-a104-2814156afa53 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.367336] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.149s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.371148] env[68217]: DEBUG oslo_vmware.rw_handles [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Getting lease state for 
https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f2314-869a-322e-b90b-95f742eb2b98/disk-0.vmdk. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 694.371675] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.976s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 694.371919] env[68217]: DEBUG nova.objects.instance [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lazy-loading 'resources' on Instance uuid 00d2302b-84d4-42d8-94c7-caf45b925ddf {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 694.373706] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b008991-f230-4e01-bdce-ab1b08fb27bd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.380635] env[68217]: DEBUG oslo_vmware.rw_handles [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f2314-869a-322e-b90b-95f742eb2b98/disk-0.vmdk is in state: ready. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 694.380802] env[68217]: ERROR oslo_vmware.rw_handles [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f2314-869a-322e-b90b-95f742eb2b98/disk-0.vmdk due to incomplete transfer. [ 694.381085] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-04fdafda-4550-4d87-82df-abdd84347592 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.388153] env[68217]: INFO nova.scheduler.client.report [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Deleted allocations for instance fcddfd72-a130-4efc-82cb-1fb22d33d684 [ 694.394301] env[68217]: DEBUG oslo_vmware.rw_handles [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/520f2314-869a-322e-b90b-95f742eb2b98/disk-0.vmdk. 
{{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 694.394487] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Uploaded image 6bfbe66f-7dc2-4c4a-b095-09003f931d3a to the Glance image server {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 694.396482] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 694.396721] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-a4d14ee0-da68-45c3-8ea4-b2e7fadbcc7a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.403236] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 694.420912] env[68217]: DEBUG oslo_vmware.api [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the task: (returnval){ [ 694.420912] env[68217]: value = "task-2960908" [ 694.420912] env[68217]: _type = "Task" [ 694.420912] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.431117] env[68217]: DEBUG oslo_vmware.api [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960908, 'name': Destroy_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.544499] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': task-2960904, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072464} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 694.544785] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 694.545785] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c65c749f-c663-47e8-b1fd-45f6aaba8274 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.569098] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Reconfiguring VM instance instance-00000021 to attach disk [datastore2] dc45d268-7a7f-4e65-b6fa-942ddba69b03/dc45d268-7a7f-4e65-b6fa-942ddba69b03.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 694.569851] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eddb72d3-e89b-4b33-94d3-0ce8add99e63 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.592868] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Waiting for the task: (returnval){ [ 694.592868] env[68217]: value = "task-2960909" [ 694.592868] env[68217]: _type = "Task" [ 694.592868] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 694.605149] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': task-2960909, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.639893] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960907, 'name': CreateVM_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.754160] env[68217]: DEBUG nova.network.neutron [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 694.829562] env[68217]: DEBUG nova.network.neutron [req-41ba3a5e-d306-4853-81bc-e18c3aa7c5ea req-ae53ffe5-140e-401c-b61d-51caae0c4ee8 service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Updated VIF entry in instance network info cache for port 2077ec71-7159-4678-b110-039046651648. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 694.829944] env[68217]: DEBUG nova.network.neutron [req-41ba3a5e-d306-4853-81bc-e18c3aa7c5ea req-ae53ffe5-140e-401c-b61d-51caae0c4ee8 service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Updating instance_info_cache with network_info: [{"id": "2077ec71-7159-4678-b110-039046651648", "address": "fa:16:3e:d6:50:3d", "network": {"id": "b3eaa102-7723-4193-97a7-84c3b6d87de4", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1747363513-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b99296b92df248d684d9e224d27bdcbc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2077ec71-71", "ovs_interfaceid": "2077ec71-7159-4678-b110-039046651648", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 694.897859] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d10a9c71-0613-4f43-b977-9dcb2775324e tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "fcddfd72-a130-4efc-82cb-1fb22d33d684" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.384s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 694.932109] env[68217]: DEBUG oslo_vmware.api [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960908, 'name': Destroy_Task} progress is 33%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 694.968729] env[68217]: DEBUG nova.network.neutron [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Updating instance_info_cache with network_info: [{"id": "e1840648-4f87-4974-bc63-bd2b25acab29", "address": "fa:16:3e:d3:ca:e4", "network": {"id": "72cf5b88-0408-457a-9587-2b78b9e3bafb", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1678018221-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6cd1165637a44528a61171aef40a553", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1840648-4f", "ovs_interfaceid": "e1840648-4f87-4974-bc63-bd2b25acab29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 695.104368] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': task-2960909, 'name': ReconfigVM_Task, 'duration_secs': 0.313117} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.107947] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Reconfigured VM instance instance-00000021 to attach disk [datastore2] dc45d268-7a7f-4e65-b6fa-942ddba69b03/dc45d268-7a7f-4e65-b6fa-942ddba69b03.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 695.109069] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2f153825-679c-4ee4-a900-370e033b0038 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.119022] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Waiting for the task: (returnval){ [ 695.119022] env[68217]: value = "task-2960910" [ 695.119022] env[68217]: _type = "Task" [ 695.119022] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.129649] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': task-2960910, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.141885] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960907, 'name': CreateVM_Task, 'duration_secs': 0.666667} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.141885] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 695.142474] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.143086] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.143086] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 695.144311] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90f99471-df15-4a7a-91a1-b194ef3230e4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.151450] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 695.151450] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c993b7-a08f-55fb-ddd3-e0ac5c662864" [ 695.151450] env[68217]: _type = "Task" [ 695.151450] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.160295] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c993b7-a08f-55fb-ddd3-e0ac5c662864, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.306564] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55d60b3-44c6-4743-8df1-693209d11e2b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.315019] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-61de7b0d-666a-4563-817f-2c3ef3a21b31 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Doing hard reboot of VM {{(pid=68217) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 695.315296] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-fa478698-4413-4b74-b9b6-8e076507699a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.328026] env[68217]: DEBUG oslo_vmware.api [None req-61de7b0d-666a-4563-817f-2c3ef3a21b31 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Waiting for the task: (returnval){ [ 695.328026] env[68217]: value = "task-2960911" [ 695.328026] env[68217]: _type = "Task" [ 695.328026] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.334257] env[68217]: DEBUG oslo_concurrency.lockutils [req-41ba3a5e-d306-4853-81bc-e18c3aa7c5ea req-ae53ffe5-140e-401c-b61d-51caae0c4ee8 service nova] Releasing lock "refresh_cache-b5e15801-301a-4ee6-87d2-bbf749967631" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.341708] env[68217]: DEBUG oslo_vmware.api [None req-61de7b0d-666a-4563-817f-2c3ef3a21b31 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': task-2960911, 'name': ResetVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.354939] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquiring lock "4f4dc254-8e4f-4c5f-a2a8-eef6230825c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.355325] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "4f4dc254-8e4f-4c5f-a2a8-eef6230825c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.355581] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquiring lock "4f4dc254-8e4f-4c5f-a2a8-eef6230825c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 695.355792] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "4f4dc254-8e4f-4c5f-a2a8-eef6230825c6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 695.356015] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "4f4dc254-8e4f-4c5f-a2a8-eef6230825c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 695.359314] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73aa1cec-b188-4c3a-b36e-c00719165112 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.362407] env[68217]: INFO nova.compute.manager [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Terminating instance [ 695.369855] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f07a8cd-6844-4686-b694-e225ea14635d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.407025] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2338cd42-6bf2-4b60-879e-6557e88f8b3a {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.421945] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1288b3a8-3d77-4878-a59a-bc465dc708e7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.445626] env[68217]: DEBUG nova.compute.provider_tree [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 695.447364] env[68217]: DEBUG oslo_vmware.api [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960908, 'name': Destroy_Task, 'duration_secs': 0.870768} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.448541] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Destroyed the VM [ 695.448846] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 695.449135] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2edd9090-8791-4a71-90e5-8add6e906f6d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.458977] env[68217]: DEBUG oslo_vmware.api [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the task: (returnval){ [ 695.458977] env[68217]: value = "task-2960912" [ 695.458977] env[68217]: _type = "Task" [ 695.458977] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.467905] env[68217]: DEBUG oslo_vmware.api [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960912, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.471569] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Releasing lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.471919] env[68217]: DEBUG nova.compute.manager [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Instance network_info: |[{"id": "e1840648-4f87-4974-bc63-bd2b25acab29", "address": "fa:16:3e:d3:ca:e4", "network": {"id": "72cf5b88-0408-457a-9587-2b78b9e3bafb", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1678018221-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6cd1165637a44528a61171aef40a553", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1840648-4f", "ovs_interfaceid": "e1840648-4f87-4974-bc63-bd2b25acab29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 695.472745] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:ca:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '90c863af-25e3-4fc6-a125-8baa7540298c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e1840648-4f87-4974-bc63-bd2b25acab29', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 695.480975] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Creating folder: Project (a6cd1165637a44528a61171aef40a553). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 695.481393] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9b7bfe86-8ee0-4237-aa67-051b2510619a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.494537] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Created folder: Project (a6cd1165637a44528a61171aef40a553) in parent group-v594094. [ 695.494810] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Creating folder: Instances. Parent ref: group-v594195. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 695.495089] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a361c5bd-5091-49be-812f-0c7a89df8ea4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.506291] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Created folder: Instances in parent group-v594195. [ 695.506980] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 695.506980] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 695.507135] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b5289770-03cb-43a6-a862-609209bf0223 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.530044] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 695.530044] env[68217]: value = "task-2960915" [ 695.530044] env[68217]: _type = "Task" [ 695.530044] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.538966] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960915, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.627642] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': task-2960910, 'name': Rename_Task, 'duration_secs': 0.190321} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.627986] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 695.628275] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-892d3bb6-c4cc-47b1-9b4d-94e4421416a4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.636342] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Waiting for the task: (returnval){ [ 695.636342] env[68217]: value = "task-2960916" [ 695.636342] env[68217]: _type = "Task" [ 695.636342] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.645078] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': task-2960916, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.661992] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c993b7-a08f-55fb-ddd3-e0ac5c662864, 'name': SearchDatastore_Task, 'duration_secs': 0.019376} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.662332] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 695.662583] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 695.662839] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.662983] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.663192] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 695.663473] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-48331abb-7c7c-4701-83b2-d5fd39c3696c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.673434] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 695.673643] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 695.674448] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-381b6b1a-d6a8-43db-bd75-f794084dab1a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.681748] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 695.681748] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5210a6d6-7dfd-99d9-11f9-d619202a2f77" [ 695.681748] env[68217]: _type = "Task" [ 695.681748] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.691661] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5210a6d6-7dfd-99d9-11f9-d619202a2f77, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.834498] env[68217]: DEBUG nova.compute.manager [req-cfb5a8c1-1181-4bec-a15c-7861580d5ff2 req-31786111-e981-4a7a-aed8-527b9f66b209 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Received event network-changed-e1840648-4f87-4974-bc63-bd2b25acab29 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 695.834498] env[68217]: DEBUG nova.compute.manager [req-cfb5a8c1-1181-4bec-a15c-7861580d5ff2 req-31786111-e981-4a7a-aed8-527b9f66b209 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Refreshing instance network info cache due to event network-changed-e1840648-4f87-4974-bc63-bd2b25acab29. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 695.834966] env[68217]: DEBUG oslo_concurrency.lockutils [req-cfb5a8c1-1181-4bec-a15c-7861580d5ff2 req-31786111-e981-4a7a-aed8-527b9f66b209 service nova] Acquiring lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 695.834966] env[68217]: DEBUG oslo_concurrency.lockutils [req-cfb5a8c1-1181-4bec-a15c-7861580d5ff2 req-31786111-e981-4a7a-aed8-527b9f66b209 service nova] Acquired lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 695.834966] env[68217]: DEBUG nova.network.neutron [req-cfb5a8c1-1181-4bec-a15c-7861580d5ff2 req-31786111-e981-4a7a-aed8-527b9f66b209 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Refreshing network info cache for port e1840648-4f87-4974-bc63-bd2b25acab29 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 695.842255] env[68217]: DEBUG oslo_vmware.api [None req-61de7b0d-666a-4563-817f-2c3ef3a21b31 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': task-2960911, 'name': ResetVM_Task, 'duration_secs': 0.108644} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 695.842737] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-61de7b0d-666a-4563-817f-2c3ef3a21b31 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Did hard reboot of VM {{(pid=68217) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 695.842934] env[68217]: DEBUG nova.compute.manager [None req-61de7b0d-666a-4563-817f-2c3ef3a21b31 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 695.843729] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed5fadb-9098-45d2-89a2-c8f5501bbc37 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.866339] env[68217]: DEBUG nova.compute.manager [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 695.866592] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 695.867738] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-132989d7-67ea-4b3d-966a-0e0310bc4f45 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.876989] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 695.878209] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4905d45-3cf6-441c-8899-e9af00a3598b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 695.885913] env[68217]: DEBUG oslo_vmware.api [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 695.885913] env[68217]: value = "task-2960917" [ 695.885913] env[68217]: _type = "Task" [ 695.885913] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 695.898813] env[68217]: DEBUG oslo_vmware.api [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960917, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.971133] env[68217]: DEBUG oslo_vmware.api [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960912, 'name': RemoveSnapshot_Task} progress is 36%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 695.977398] env[68217]: ERROR nova.scheduler.client.report [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] [req-e820f823-c52a-441a-804a-c399e7e97d6e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-e820f823-c52a-441a-804a-c399e7e97d6e"}]} [ 695.996356] env[68217]: DEBUG nova.scheduler.client.report [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 696.013657] env[68217]: DEBUG nova.scheduler.client.report [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 696.013893] env[68217]: DEBUG nova.compute.provider_tree [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 696.027685] env[68217]: DEBUG nova.scheduler.client.report [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Refreshing aggregate associations for 
resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 696.042516] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960915, 'name': CreateVM_Task, 'duration_secs': 0.454143} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.042750] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 696.043603] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.043801] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 696.044184] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 696.044489] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3af2b56f-6121-43a8-b1d4-2c5c1a6a68cb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.049941] env[68217]: DEBUG nova.scheduler.client.report [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 696.055181] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Waiting for the task: (returnval){ [ 696.055181] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524e30fd-e23f-7d07-c45a-8a7a38089c1b" [ 696.055181] env[68217]: _type = "Task" [ 696.055181] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.066105] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524e30fd-e23f-7d07-c45a-8a7a38089c1b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.152846] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': task-2960916, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.195626] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5210a6d6-7dfd-99d9-11f9-d619202a2f77, 'name': SearchDatastore_Task, 'duration_secs': 0.010795} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.196517] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc3879e9-1f93-4c40-8bff-a757c65fcc64 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.205425] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 696.205425] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524eedac-090e-9124-62e7-d57ff3984eb0" [ 696.205425] env[68217]: _type = "Task" [ 696.205425] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.213927] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524eedac-090e-9124-62e7-d57ff3984eb0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.360665] env[68217]: DEBUG oslo_concurrency.lockutils [None req-61de7b0d-666a-4563-817f-2c3ef3a21b31 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Lock "a4a88f10-937a-4fa6-aa15-eb7f669e77d0" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.500s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 696.401510] env[68217]: DEBUG oslo_vmware.api [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960917, 'name': PowerOffVM_Task, 'duration_secs': 0.239987} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.402024] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 696.402433] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 696.402529] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9b6c1456-6b8f-4193-a747-f03ea3a92415 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.469742] env[68217]: DEBUG oslo_vmware.api [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960912, 'name': RemoveSnapshot_Task, 'duration_secs': 0.773083} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.472159] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 696.472411] env[68217]: INFO nova.compute.manager [None req-b4168c06-93a7-435c-a78d-26a24be19741 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Took 16.49 seconds to snapshot the instance on the hypervisor. 
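Editor's note: the recurring "Waiting for the task: (returnval){ value = task-... }" / "progress is N%" / "completed successfully" triplets in this log come from oslo.vmware's task-polling helper: the driver submits an asynchronous vCenter task (ReconfigVM_Task, PowerOnVM_Task, RemoveSnapshot_Task, DeleteDatastoreFile_Task, ...) and then polls it until it finishes. The sketch below is a minimal, self-contained illustration of that pattern only; it is not the oslo.vmware implementation, and `get_task_info`, `poll_interval`, and `TaskFailed` are illustrative names.

```python
import time


class TaskFailed(Exception):
    """Raised when the polled task ends in an error state (illustrative)."""


def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
    """Poll an asynchronous task until it succeeds, fails, or times out.

    get_task_info is assumed to be a callable returning a dict such as
    {"state": "running" | "success" | "error", "progress": int, "error": str}.
    This mirrors the "progress is N%" ... "completed successfully" pattern
    seen in the log in a simplified form.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info["state"] == "success":
            return info
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        # Still running: report progress and poll again after a short sleep.
        print(f"progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
    raise TimeoutError("task did not complete in time")
```

In the real driver the per-task `duration_secs` values logged above (e.g. ReconfigVM_Task in 0.31s, CreateVM_Task in 0.45-0.67s) are simply the elapsed time of this polling loop for each submitted vCenter task.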
[ 696.514378] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 696.514378] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 696.514378] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Deleting the datastore file [datastore2] 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 696.516290] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6b0dc623-919c-4b33-8883-b6909a229135 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.524506] env[68217]: DEBUG oslo_vmware.api [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for the task: (returnval){ [ 696.524506] env[68217]: value = "task-2960919" [ 696.524506] env[68217]: _type = "Task" [ 696.524506] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.545647] env[68217]: DEBUG oslo_vmware.api [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960919, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.551664] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40a10a55-34c3-4f14-bf6b-cc650a2265a0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.562055] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c159be3-b084-47cb-a0ad-cbd0c95d3e18 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.568848] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524e30fd-e23f-7d07-c45a-8a7a38089c1b, 'name': SearchDatastore_Task, 'duration_secs': 0.01939} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.569473] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.569700] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 696.569924] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 696.601405] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7d51e9-124f-420f-8b97-029c8f4aaa23 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.610040] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7269348e-5fdd-4f7f-a3a6-b7f33c54bfc6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.625067] env[68217]: DEBUG nova.compute.provider_tree [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 696.645951] env[68217]: DEBUG oslo_vmware.api [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': task-2960916, 'name': PowerOnVM_Task, 'duration_secs': 0.5859} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.646225] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 696.646478] env[68217]: INFO nova.compute.manager [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Took 8.80 seconds to spawn the instance on the hypervisor. [ 696.646552] env[68217]: DEBUG nova.compute.manager [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 696.647712] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b9d967-e679-4399-a600-a0994531c789 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.716073] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524eedac-090e-9124-62e7-d57ff3984eb0, 'name': SearchDatastore_Task, 'duration_secs': 0.019352} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 696.716301] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 696.716548] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] b5e15801-301a-4ee6-87d2-bbf749967631/b5e15801-301a-4ee6-87d2-bbf749967631.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 696.716812] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 696.716995] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Creating directory with path [datastore2] devstack-image-cache_base 
{{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 696.717213] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d68ecce2-90a6-41ef-a83b-d00262086862 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.719237] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ece188c0-f842-453e-b82e-ebdcf8da3e6a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.728176] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 696.728176] env[68217]: value = "task-2960920" [ 696.728176] env[68217]: _type = "Task" [ 696.728176] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.734474] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 696.734657] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 696.735765] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c836bd45-2702-4893-a2db-671bbc094bdc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.740883] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2960920, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 696.742367] env[68217]: DEBUG nova.network.neutron [req-cfb5a8c1-1181-4bec-a15c-7861580d5ff2 req-31786111-e981-4a7a-aed8-527b9f66b209 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Updated VIF entry in instance network info cache for port e1840648-4f87-4974-bc63-bd2b25acab29. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 696.742715] env[68217]: DEBUG nova.network.neutron [req-cfb5a8c1-1181-4bec-a15c-7861580d5ff2 req-31786111-e981-4a7a-aed8-527b9f66b209 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Updating instance_info_cache with network_info: [{"id": "e1840648-4f87-4974-bc63-bd2b25acab29", "address": "fa:16:3e:d3:ca:e4", "network": {"id": "72cf5b88-0408-457a-9587-2b78b9e3bafb", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1678018221-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6cd1165637a44528a61171aef40a553", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1840648-4f", "ovs_interfaceid": "e1840648-4f87-4974-bc63-bd2b25acab29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 696.745205] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Waiting for the task: (returnval){ [ 696.745205] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5289ad26-8dd9-4ff5-7dc5-ac6425f472ae" [ 696.745205] env[68217]: _type = "Task" [ 696.745205] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 696.754812] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5289ad26-8dd9-4ff5-7dc5-ac6425f472ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.037656] env[68217]: DEBUG oslo_vmware.api [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Task: {'id': task-2960919, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.476967} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.037656] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 697.037656] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 697.037656] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 697.037656] env[68217]: INFO nova.compute.manager [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Took 1.17 seconds to destroy the instance on the hypervisor. [ 697.038131] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 697.038131] env[68217]: DEBUG nova.compute.manager [-] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 697.038131] env[68217]: DEBUG nova.network.neutron [-] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 697.166761] env[68217]: INFO nova.compute.manager [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Took 46.02 seconds to build instance. 
[ 697.169745] env[68217]: DEBUG nova.scheduler.client.report [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 56 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 697.170047] env[68217]: DEBUG nova.compute.provider_tree [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 56 to 57 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 697.170240] env[68217]: DEBUG nova.compute.provider_tree [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 697.240307] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2960920, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.249942] env[68217]: DEBUG oslo_concurrency.lockutils [req-cfb5a8c1-1181-4bec-a15c-7861580d5ff2 req-31786111-e981-4a7a-aed8-527b9f66b209 service nova] Releasing lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 697.257717] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5289ad26-8dd9-4ff5-7dc5-ac6425f472ae, 'name': SearchDatastore_Task, 'duration_secs': 0.023332} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.258588] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9799fb3-1f79-4d13-94bc-275cd066a7a7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.266888] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Waiting for the task: (returnval){ [ 697.266888] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c8a615-085d-fe2b-4a5a-bc6177625d6d" [ 697.266888] env[68217]: _type = "Task" [ 697.266888] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.280025] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c8a615-085d-fe2b-4a5a-bc6177625d6d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.563389] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Acquiring lock "a4a88f10-937a-4fa6-aa15-eb7f669e77d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.563506] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Lock "a4a88f10-937a-4fa6-aa15-eb7f669e77d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.563731] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Acquiring lock "a4a88f10-937a-4fa6-aa15-eb7f669e77d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 697.563917] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Lock "a4a88f10-937a-4fa6-aa15-eb7f669e77d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.564172] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Lock "a4a88f10-937a-4fa6-aa15-eb7f669e77d0-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.567036] env[68217]: INFO nova.compute.manager [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Terminating instance [ 697.623110] env[68217]: DEBUG nova.compute.manager [req-05bf02df-9144-4236-9765-652fbe94baba req-505a763e-ae2a-4b02-9cfa-3e011cd301d9 service nova] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Received event network-vif-deleted-852ba444-6eea-4b2f-bbd8-58cdde27ee66 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 697.623280] env[68217]: INFO nova.compute.manager [req-05bf02df-9144-4236-9765-652fbe94baba req-505a763e-ae2a-4b02-9cfa-3e011cd301d9 service nova] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Neutron deleted interface 852ba444-6eea-4b2f-bbd8-58cdde27ee66; detaching it from the instance and deleting it from the info cache [ 697.623474] env[68217]: DEBUG nova.network.neutron [req-05bf02df-9144-4236-9765-652fbe94baba req-505a763e-ae2a-4b02-9cfa-3e011cd301d9 service nova] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.669413] env[68217]: DEBUG oslo_concurrency.lockutils [None req-76e2f4fb-a610-4088-816e-4a3b9fb3414b tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Lock "dc45d268-7a7f-4e65-b6fa-942ddba69b03" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.210s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.677223] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.305s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 697.680727] env[68217]: DEBUG oslo_concurrency.lockutils [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 28.970s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 697.681534] env[68217]: INFO nova.compute.claims [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 697.704123] env[68217]: INFO nova.scheduler.client.report [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Deleted allocations for instance 00d2302b-84d4-42d8-94c7-caf45b925ddf [ 697.742567] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 
tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2960920, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.763379} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.742837] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] b5e15801-301a-4ee6-87d2-bbf749967631/b5e15801-301a-4ee6-87d2-bbf749967631.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 697.743114] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 697.743372] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7c5196fe-ba14-4470-a09a-73b8fc744a15 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.751012] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 697.751012] env[68217]: value = "task-2960921" [ 697.751012] env[68217]: _type = "Task" [ 697.751012] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.762283] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2960921, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.777441] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c8a615-085d-fe2b-4a5a-bc6177625d6d, 'name': SearchDatastore_Task, 'duration_secs': 0.058287} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 697.777441] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 697.777725] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] e089c20e-b788-4e6c-9bd2-9ad485305582/e089c20e-b788-4e6c-9bd2-9ad485305582.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 697.777856] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-20cd5274-7305-438c-91d6-87451016a2bb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.786657] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Waiting for the task: (returnval){ [ 697.786657] env[68217]: value = "task-2960922" [ 697.786657] env[68217]: _type = "Task" [ 697.786657] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 697.796567] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2960922, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 697.960080] env[68217]: DEBUG nova.network.neutron [-] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.075945] env[68217]: DEBUG nova.compute.manager [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 698.075945] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 698.075945] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f11b8c0-149d-47d9-b463-f48ff4a88f9a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.082034] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 698.082366] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a71d2d41-5185-433d-9ad8-2f8aedc7db0a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.090199] env[68217]: DEBUG oslo_vmware.api [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Waiting for the task: (returnval){ [ 698.090199] env[68217]: value = "task-2960923" [ 698.090199] env[68217]: _type = "Task" [ 698.090199] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.105116] env[68217]: DEBUG oslo_vmware.api [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': task-2960923, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.125864] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b8e56a01-67fd-413d-a5dc-53daed485930 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.136389] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f65c2e08-c56f-49a2-b7e7-da6b7a2b2487 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.170240] env[68217]: DEBUG nova.compute.manager [req-05bf02df-9144-4236-9765-652fbe94baba req-505a763e-ae2a-4b02-9cfa-3e011cd301d9 service nova] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Detach interface failed, port_id=852ba444-6eea-4b2f-bbd8-58cdde27ee66, reason: Instance 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 698.171931] env[68217]: DEBUG nova.compute.manager [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 698.215239] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b563dc5f-ddb5-4eb8-95a6-1b507d759de8 tempest-ServerShowV247Test-2063235909 tempest-ServerShowV247Test-2063235909-project-member] Lock "00d2302b-84d4-42d8-94c7-caf45b925ddf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.657s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.261677] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2960921, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074082} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.261984] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 698.262838] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f9225c5-cb7f-44e5-b0dd-0a909b0d2303 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.288426] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Reconfiguring VM instance instance-00000022 to attach disk [datastore2] b5e15801-301a-4ee6-87d2-bbf749967631/b5e15801-301a-4ee6-87d2-bbf749967631.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 698.288426] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09d905b8-b7cb-4c0f-9666-bbddfb869b79 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.313442] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2960922, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.315529] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 698.315529] env[68217]: value = "task-2960924" [ 698.315529] env[68217]: _type = "Task" [ 698.315529] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.325480] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2960924, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.464604] env[68217]: INFO nova.compute.manager [-] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Took 1.43 seconds to deallocate network for instance. [ 698.602589] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Acquiring lock "f748cf37-6605-49a2-a418-51667a0fac4a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.602856] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Lock "f748cf37-6605-49a2-a418-51667a0fac4a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.603069] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Acquiring lock "f748cf37-6605-49a2-a418-51667a0fac4a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.603254] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Lock "f748cf37-6605-49a2-a418-51667a0fac4a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 698.603420] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Lock "f748cf37-6605-49a2-a418-51667a0fac4a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 698.605075] env[68217]: DEBUG oslo_vmware.api [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': task-2960923, 'name': PowerOffVM_Task, 'duration_secs': 0.29741} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.605630] env[68217]: INFO nova.compute.manager [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Terminating instance [ 698.607160] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 698.607265] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 698.607475] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-30deb58c-ad5a-4c68-9a8d-25133745315d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.701073] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 698.799385] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2960922, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.888467} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 698.803059] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] e089c20e-b788-4e6c-9bd2-9ad485305582/e089c20e-b788-4e6c-9bd2-9ad485305582.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 698.803395] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 698.804177] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4e9dfcf8-0616-4156-811e-e0c13416599b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 698.812707] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Waiting for the task: (returnval){ [ 698.812707] env[68217]: value = "task-2960926" [ 698.812707] env[68217]: _type = "Task" [ 698.812707] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 698.828966] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2960926, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.832247] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2960924, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 698.971054] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.114438] env[68217]: DEBUG nova.compute.manager [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 699.114726] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 699.121257] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c9edd51-1581-49f6-be23-0ceb87e7d78b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.129961] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 699.130236] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc0227d6-43bf-41b7-a140-cb7c8fd549c4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.141802] env[68217]: DEBUG oslo_vmware.api [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the task: (returnval){ [ 699.141802] env[68217]: value = "task-2960927" [ 699.141802] env[68217]: _type = "Task" [ 699.141802] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.151335] env[68217]: DEBUG oslo_vmware.api [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960927, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.153772] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee04700b-e517-4394-95ec-06c88096a872 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.162354] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d320994-25d8-4ab4-81b4-d13853fe627a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.198460] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a6029ce-639a-4de1-8d19-fd6f52e94d2a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.207683] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97685c4f-c1b6-4899-b094-d8cdf9d76769 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.229852] env[68217]: DEBUG nova.compute.provider_tree [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 699.248915] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 699.249145] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 699.249318] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Deleting the datastore file [datastore1] a4a88f10-937a-4fa6-aa15-eb7f669e77d0 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 699.249581] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d686c867-31b6-48c4-a6d9-1fee6ccb9c1a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.256922] env[68217]: DEBUG oslo_vmware.api [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Waiting for the task: (returnval){ [ 699.256922] env[68217]: value = "task-2960928" [ 699.256922] env[68217]: _type = "Task" [ 699.256922] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.265883] env[68217]: DEBUG oslo_vmware.api [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': task-2960928, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.326164] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2960926, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086633} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.327049] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 699.328035] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808e9ee3-757b-4fb7-a369-31dab0747e05 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.333781] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2960924, 'name': ReconfigVM_Task, 'duration_secs': 0.537543} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.334392] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Reconfigured VM instance instance-00000022 to attach disk [datastore2] b5e15801-301a-4ee6-87d2-bbf749967631/b5e15801-301a-4ee6-87d2-bbf749967631.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 699.335067] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1daaf0ec-ac71-4814-bcbe-00bb0e167195 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.354395] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Reconfiguring VM instance instance-00000023 to attach disk [datastore2] e089c20e-b788-4e6c-9bd2-9ad485305582/e089c20e-b788-4e6c-9bd2-9ad485305582.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 699.356271] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d50142b7-84e6-4d7b-bd40-070e5de4d166 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.375473] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 699.375473] env[68217]: value = "task-2960929" [ 699.375473] env[68217]: _type = "Task" [ 699.375473] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.384596] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Waiting for the task: (returnval){ [ 699.384596] env[68217]: value = "task-2960930" [ 699.384596] env[68217]: _type = "Task" [ 699.384596] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.394695] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2960929, 'name': Rename_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.402365] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2960930, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.651892] env[68217]: DEBUG oslo_vmware.api [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960927, 'name': PowerOffVM_Task, 'duration_secs': 0.430075} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.652192] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 699.652439] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 699.652680] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8dd74d7f-1409-4999-b856-2838d9ba7b27 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.737105] env[68217]: DEBUG nova.scheduler.client.report [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 699.774769] env[68217]: DEBUG oslo_vmware.api [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Task: {'id': task-2960928, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.360769} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.775043] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 699.775236] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 699.775403] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 699.775570] env[68217]: INFO nova.compute.manager [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Took 1.70 seconds to destroy the instance on the hypervisor. [ 699.775816] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 699.776014] env[68217]: DEBUG nova.compute.manager [-] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 699.776139] env[68217]: DEBUG nova.network.neutron [-] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 699.782444] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 699.782709] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 699.782937] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Deleting the datastore file [datastore2] f748cf37-6605-49a2-a418-51667a0fac4a {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 699.783440] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-88f85775-9f39-448b-b372-273406c13a59 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.792161] env[68217]: DEBUG oslo_vmware.api [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for the task: (returnval){ [ 699.792161] env[68217]: value = "task-2960932" [ 699.792161] env[68217]: _type = "Task" [ 699.792161] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.800805] env[68217]: DEBUG oslo_vmware.api [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960932, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.876478] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Acquiring lock "dc45d268-7a7f-4e65-b6fa-942ddba69b03" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.876818] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Lock "dc45d268-7a7f-4e65-b6fa-942ddba69b03" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.877193] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Acquiring lock "dc45d268-7a7f-4e65-b6fa-942ddba69b03-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 699.877307] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Lock "dc45d268-7a7f-4e65-b6fa-942ddba69b03-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 699.877510] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Lock "dc45d268-7a7f-4e65-b6fa-942ddba69b03-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 699.879827] env[68217]: INFO nova.compute.manager [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Terminating instance [ 699.897555] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2960929, 'name': Rename_Task, 'duration_secs': 0.153899} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 699.900298] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 699.900595] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2960930, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 699.900761] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-32d791c8-6c1a-4b5c-a434-a1bbac9311a6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.909392] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 699.909392] env[68217]: value = "task-2960933" [ 699.909392] env[68217]: _type = "Task" [ 699.909392] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 699.924220] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2960933, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.242780] env[68217]: DEBUG oslo_concurrency.lockutils [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.563s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 700.243402] env[68217]: DEBUG nova.compute.manager [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 700.247279] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 29.110s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 700.247523] env[68217]: DEBUG nova.objects.instance [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68217) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 700.304782] env[68217]: DEBUG oslo_vmware.api [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Task: {'id': task-2960932, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.352803} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.305217] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 700.305359] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 700.305472] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 700.305657] env[68217]: INFO nova.compute.manager [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Took 1.19 seconds to destroy the instance on the hypervisor. [ 700.305903] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 700.306762] env[68217]: DEBUG nova.compute.manager [-] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 700.306762] env[68217]: DEBUG nova.network.neutron [-] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 700.388254] env[68217]: DEBUG nova.compute.manager [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 700.388490] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 700.393120] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ec7188-fa03-41a4-997a-ec6281e00e49 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.408029] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 700.408029] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7079db7a-fbf6-4236-8f3d-e0b297103c93 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.409739] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2960930, 'name': ReconfigVM_Task, 'duration_secs': 0.78895} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.410188] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Reconfigured VM instance instance-00000023 to attach disk [datastore2] e089c20e-b788-4e6c-9bd2-9ad485305582/e089c20e-b788-4e6c-9bd2-9ad485305582.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 700.416372] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b8bda3ec-1522-478c-b8af-2c91f2849ef5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.424812] env[68217]: DEBUG oslo_vmware.api [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2960933, 'name': PowerOnVM_Task, 'duration_secs': 0.486482} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.427444] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 700.427651] env[68217]: INFO nova.compute.manager [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Took 9.87 seconds to spawn the instance on the hypervisor. [ 700.427820] env[68217]: DEBUG nova.compute.manager [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 700.428215] env[68217]: DEBUG oslo_vmware.api [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Waiting for the task: (returnval){ [ 700.428215] env[68217]: value = "task-2960934" [ 700.428215] env[68217]: _type = "Task" [ 700.428215] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.428453] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Waiting for the task: (returnval){ [ 700.428453] env[68217]: value = "task-2960935" [ 700.428453] env[68217]: _type = "Task" [ 700.428453] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.430173] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42bd274e-20c3-4122-957a-fd423954b2da {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.454825] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2960935, 'name': Rename_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.455122] env[68217]: DEBUG oslo_vmware.api [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': task-2960934, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.667339] env[68217]: DEBUG nova.compute.manager [req-b75fcb4d-feda-4c16-b9c7-1beedff8991c req-b5cc00c8-6154-4898-8bc7-1e444a786738 service nova] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Received event network-vif-deleted-6da8ccf1-d23a-4ebd-8053-78f80e526699 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 700.667537] env[68217]: INFO nova.compute.manager [req-b75fcb4d-feda-4c16-b9c7-1beedff8991c req-b5cc00c8-6154-4898-8bc7-1e444a786738 service nova] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Neutron deleted interface 6da8ccf1-d23a-4ebd-8053-78f80e526699; detaching it from the instance and deleting it from the info cache [ 700.667711] env[68217]: DEBUG nova.network.neutron [req-b75fcb4d-feda-4c16-b9c7-1beedff8991c req-b5cc00c8-6154-4898-8bc7-1e444a786738 service nova] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.753533] env[68217]: DEBUG nova.compute.utils [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 700.758234] env[68217]: DEBUG nova.compute.manager [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 700.758234] env[68217]: DEBUG nova.network.neutron [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 700.828667] env[68217]: DEBUG nova.policy [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '421e01339dc845ccb7341fd3b5c90ebb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87c7e14d2fe94e58bb3df92a8841486b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 700.905788] env[68217]: DEBUG nova.network.neutron [-] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.946851] env[68217]: DEBUG oslo_vmware.api [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': task-2960934, 'name': PowerOffVM_Task, 'duration_secs': 0.229609} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.950903] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 700.950903] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 700.951144] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2960935, 'name': Rename_Task, 'duration_secs': 0.190732} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 700.954906] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b0389762-09dd-4b31-a816-b05db8b48640 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.954906] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 700.954906] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3525de71-f742-4296-a9b1-55fa1c5c0db8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 700.963804] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Waiting for the task: (returnval){ [ 700.963804] env[68217]: value = "task-2960936" [ 700.963804] env[68217]: _type = "Task" [ 700.963804] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 700.967519] env[68217]: INFO nova.compute.manager [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Took 45.00 seconds to build instance. [ 700.981938] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2960936, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 700.997609] env[68217]: DEBUG nova.compute.manager [req-21cfeffc-b96d-4756-933e-a4d13374fc0f req-828d6800-0e4f-4045-9bda-4e2591cf8fbe service nova] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Received event network-vif-deleted-a26f19ed-bde8-4dee-bd59-7a0ed95f7659 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 700.997865] env[68217]: INFO nova.compute.manager [req-21cfeffc-b96d-4756-933e-a4d13374fc0f req-828d6800-0e4f-4045-9bda-4e2591cf8fbe service nova] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Neutron deleted interface a26f19ed-bde8-4dee-bd59-7a0ed95f7659; detaching it from the instance and deleting it from the info cache [ 700.998151] env[68217]: DEBUG nova.network.neutron [req-21cfeffc-b96d-4756-933e-a4d13374fc0f req-828d6800-0e4f-4045-9bda-4e2591cf8fbe service nova] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.050583] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 701.050800] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 701.051087] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Deleting the datastore file [datastore2] dc45d268-7a7f-4e65-b6fa-942ddba69b03 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 701.051303] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7fb96a4a-1a86-46ee-8785-2d3c14b6ce78 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.059711] env[68217]: DEBUG oslo_vmware.api [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Waiting for the task: (returnval){ [ 701.059711] env[68217]: value = "task-2960938" [ 701.059711] env[68217]: _type = "Task" [ 701.059711] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 701.070255] env[68217]: DEBUG oslo_vmware.api [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': task-2960938, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.170749] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b261d22c-06eb-41fe-90e9-b246beb66723 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.182440] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8837c764-3e94-407c-9210-34b8167d4c31 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.228421] env[68217]: DEBUG nova.compute.manager [req-b75fcb4d-feda-4c16-b9c7-1beedff8991c req-b5cc00c8-6154-4898-8bc7-1e444a786738 service nova] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Detach interface failed, port_id=6da8ccf1-d23a-4ebd-8053-78f80e526699, reason: Instance a4a88f10-937a-4fa6-aa15-eb7f669e77d0 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 701.259368] env[68217]: DEBUG nova.compute.manager [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 701.267726] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4636f589-be62-4067-b7c7-015cb4942a08 tempest-ServersAdmin275Test-139709307 tempest-ServersAdmin275Test-139709307-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.019s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.273886] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.885s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 701.273886] env[68217]: DEBUG nova.objects.instance [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Lazy-loading 'resources' on Instance uuid 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 701.277946] env[68217]: DEBUG nova.network.neutron [-] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 701.341456] env[68217]: DEBUG nova.network.neutron [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Successfully created port: 396bcf29-b670-48a2-8e98-fb771af5eb13 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 701.414535] env[68217]: INFO nova.compute.manager [-] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Took 1.64 seconds to deallocate network for instance. 
[ 701.471652] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9269d4d-2e51-42d7-bbca-aff2ae90b691 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "b5e15801-301a-4ee6-87d2-bbf749967631" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 87.373s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 701.484634] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2960936, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 701.503042] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-05704079-9647-4ea2-9add-bae198980e82 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.516882] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558d6812-c897-41cd-a184-d7c6206595a8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.565264] env[68217]: DEBUG nova.compute.manager [req-21cfeffc-b96d-4756-933e-a4d13374fc0f req-828d6800-0e4f-4045-9bda-4e2591cf8fbe service nova] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Detach interface failed, port_id=a26f19ed-bde8-4dee-bd59-7a0ed95f7659, reason: Instance f748cf37-6605-49a2-a418-51667a0fac4a could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 701.581786] env[68217]: DEBUG oslo_vmware.api [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Task: {'id': task-2960938, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.276337} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.582520] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 701.582937] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 701.583550] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 701.583950] env[68217]: INFO nova.compute.manager [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Took 1.20 seconds to destroy the instance on the hypervisor. 
[ 701.586058] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 701.586058] env[68217]: DEBUG nova.compute.manager [-] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 701.586058] env[68217]: DEBUG nova.network.neutron [-] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 701.782789] env[68217]: INFO nova.compute.manager [-] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Took 1.48 seconds to deallocate network for instance. [ 701.922465] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 701.975939] env[68217]: DEBUG nova.compute.manager [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 701.980033] env[68217]: DEBUG oslo_vmware.api [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2960936, 'name': PowerOnVM_Task, 'duration_secs': 0.563514} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 701.980310] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 701.980512] env[68217]: INFO nova.compute.manager [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Took 8.75 seconds to spawn the instance on the hypervisor. 
[ 701.980685] env[68217]: DEBUG nova.compute.manager [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 701.981775] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2621563-72a9-4361-81bf-961163673a9a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.282037] env[68217]: DEBUG nova.compute.manager [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 702.293587] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.324271] env[68217]: DEBUG nova.virt.hardware [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 702.324553] env[68217]: DEBUG nova.virt.hardware [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 702.324908] env[68217]: DEBUG nova.virt.hardware [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 702.324908] env[68217]: DEBUG nova.virt.hardware [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 702.325758] env[68217]: DEBUG nova.virt.hardware [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 
tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 702.325945] env[68217]: DEBUG nova.virt.hardware [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 702.326196] env[68217]: DEBUG nova.virt.hardware [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 702.326705] env[68217]: DEBUG nova.virt.hardware [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 702.326705] env[68217]: DEBUG nova.virt.hardware [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 702.326819] env[68217]: DEBUG nova.virt.hardware [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 702.326904] env[68217]: DEBUG nova.virt.hardware [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 702.327860] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b217e9f-e8f7-4d4b-b897-ab62f4257fcd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.333447] env[68217]: DEBUG nova.network.neutron [-] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.343984] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3de19803-bce5-41d7-b99e-a9011eaf9135 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.365256] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7080ef5-4a24-4b84-8423-9569455e39c8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.375111] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a7293c03-5cb1-40f1-a0b1-8bc71b478ac9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.409911] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f0f60e4-14d3-48e9-bc66-6dfea394ecb5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.419733] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82eb3ed1-514a-403c-8f7b-840f1943eb8e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.434277] env[68217]: DEBUG nova.compute.provider_tree [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 702.517151] env[68217]: INFO nova.compute.manager [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Took 42.61 seconds to build instance. [ 702.520984] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 702.838478] env[68217]: INFO nova.compute.manager [-] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Took 1.25 seconds to deallocate network for instance. 
[ 702.942592] env[68217]: DEBUG nova.scheduler.client.report [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 703.019710] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d52f6743-1bdf-41f5-a1da-cb6f34969f57 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Lock "e089c20e-b788-4e6c-9bd2-9ad485305582" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 83.178s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.036113] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquiring lock "83d32dd6-2629-4451-a746-bf5270083e2a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.036402] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "83d32dd6-2629-4451-a746-bf5270083e2a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.036600] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquiring lock "83d32dd6-2629-4451-a746-bf5270083e2a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.036776] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "83d32dd6-2629-4451-a746-bf5270083e2a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.037593] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "83d32dd6-2629-4451-a746-bf5270083e2a-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.039681] env[68217]: INFO nova.compute.manager [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Terminating instance [ 703.046079] env[68217]: DEBUG nova.compute.manager [req-15de96de-9fe7-4b08-a189-e88f80049f2d req-aeedd57e-89e1-44b1-87a4-6e2b4c08fe9b service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Received event network-changed-2077ec71-7159-4678-b110-039046651648 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 703.046263] env[68217]: DEBUG nova.compute.manager [req-15de96de-9fe7-4b08-a189-e88f80049f2d req-aeedd57e-89e1-44b1-87a4-6e2b4c08fe9b service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Refreshing instance network info cache due to event network-changed-2077ec71-7159-4678-b110-039046651648. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 703.046462] env[68217]: DEBUG oslo_concurrency.lockutils [req-15de96de-9fe7-4b08-a189-e88f80049f2d req-aeedd57e-89e1-44b1-87a4-6e2b4c08fe9b service nova] Acquiring lock "refresh_cache-b5e15801-301a-4ee6-87d2-bbf749967631" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.046596] env[68217]: DEBUG oslo_concurrency.lockutils [req-15de96de-9fe7-4b08-a189-e88f80049f2d req-aeedd57e-89e1-44b1-87a4-6e2b4c08fe9b service nova] Acquired lock "refresh_cache-b5e15801-301a-4ee6-87d2-bbf749967631" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.046753] env[68217]: DEBUG nova.network.neutron [req-15de96de-9fe7-4b08-a189-e88f80049f2d req-aeedd57e-89e1-44b1-87a4-6e2b4c08fe9b service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Refreshing network info cache for port 2077ec71-7159-4678-b110-039046651648 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 703.345663] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 703.449231] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.175s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 703.452721] env[68217]: DEBUG nova.network.neutron [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Successfully updated port: 396bcf29-b670-48a2-8e98-fb771af5eb13 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 703.454671] env[68217]: DEBUG oslo_concurrency.lockutils [None 
req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 32.048s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 703.458171] env[68217]: INFO nova.compute.claims [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 703.484199] env[68217]: INFO nova.scheduler.client.report [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Deleted allocations for instance 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be [ 703.526037] env[68217]: DEBUG nova.compute.manager [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 703.543363] env[68217]: DEBUG nova.compute.manager [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 703.543554] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 703.544947] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262424bf-09f2-4e60-ae2d-786faccf6d76 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.560249] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 703.560581] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-010c4f61-1970-47ce-8b87-e4504b54f8d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.569291] env[68217]: DEBUG oslo_vmware.api [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 703.569291] env[68217]: value = "task-2960939" [ 703.569291] env[68217]: _type = "Task" [ 703.569291] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 703.583043] env[68217]: DEBUG oslo_vmware.api [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2960939, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 703.746785] env[68217]: DEBUG nova.compute.manager [req-10ef3bce-d494-4600-b9ed-60720f990ad0 req-37a81097-8e63-446d-8fa1-ca582dfe24e6 service nova] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Received event network-vif-deleted-31b89184-13c6-4151-8829-11780bcfc768 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 703.893590] env[68217]: DEBUG nova.network.neutron [req-15de96de-9fe7-4b08-a189-e88f80049f2d req-aeedd57e-89e1-44b1-87a4-6e2b4c08fe9b service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Updated VIF entry in instance network info cache for port 2077ec71-7159-4678-b110-039046651648. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 703.895184] env[68217]: DEBUG nova.network.neutron [req-15de96de-9fe7-4b08-a189-e88f80049f2d req-aeedd57e-89e1-44b1-87a4-6e2b4c08fe9b service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Updating instance_info_cache with network_info: [{"id": "2077ec71-7159-4678-b110-039046651648", "address": "fa:16:3e:d6:50:3d", "network": {"id": "b3eaa102-7723-4193-97a7-84c3b6d87de4", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1747363513-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b99296b92df248d684d9e224d27bdcbc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2077ec71-71", "ovs_interfaceid": "2077ec71-7159-4678-b110-039046651648", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.963369] env[68217]: DEBUG oslo_concurrency.lockutils [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "refresh_cache-da1524a7-2756-4429-ada2-b1f493544bd2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 703.963523] env[68217]: DEBUG oslo_concurrency.lockutils [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquired lock "refresh_cache-da1524a7-2756-4429-ada2-b1f493544bd2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 703.964066] env[68217]: DEBUG 
nova.network.neutron [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 703.992569] env[68217]: DEBUG oslo_concurrency.lockutils [None req-edad5cff-d1e5-4a60-9e75-74ff798ca0b7 tempest-ServerAddressesNegativeTestJSON-1434461166 tempest-ServerAddressesNegativeTestJSON-1434461166-project-member] Lock "8fcccac2-dae1-4af0-a2b2-787e1bb7c9be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.143s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 704.053487] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.082531] env[68217]: DEBUG oslo_vmware.api [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2960939, 'name': PowerOffVM_Task, 'duration_secs': 0.202968} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.082531] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 704.082531] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 704.082531] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5f589e6a-7a52-463b-99fd-beba39368efb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.129437] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquiring lock "7584180b-efa6-4038-9f3a-619ab7937553" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.130282] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "7584180b-efa6-4038-9f3a-619ab7937553" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.158066] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 704.158066] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 704.158066] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Deleting the datastore file [datastore1] 83d32dd6-2629-4451-a746-bf5270083e2a {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 704.158066] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0bf9c5df-e3fc-4fbf-874a-f149e3a1ba28 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.166354] env[68217]: DEBUG oslo_vmware.api [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 704.166354] env[68217]: value = "task-2960941" [ 704.166354] env[68217]: _type = "Task" [ 704.166354] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 704.177333] env[68217]: DEBUG oslo_vmware.api [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2960941, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 704.396508] env[68217]: DEBUG oslo_concurrency.lockutils [req-15de96de-9fe7-4b08-a189-e88f80049f2d req-aeedd57e-89e1-44b1-87a4-6e2b4c08fe9b service nova] Releasing lock "refresh_cache-b5e15801-301a-4ee6-87d2-bbf749967631" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 704.422013] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 704.422316] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 704.548153] env[68217]: DEBUG nova.network.neutron [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 704.680321] env[68217]: DEBUG oslo_vmware.api [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2960941, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151678} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 704.680321] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 704.680321] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 704.680321] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 704.680321] env[68217]: INFO nova.compute.manager [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Took 1.14 seconds to destroy the instance on the hypervisor. [ 704.680478] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 704.680539] env[68217]: DEBUG nova.compute.manager [-] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 704.680639] env[68217]: DEBUG nova.network.neutron [-] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 704.909500] env[68217]: DEBUG nova.network.neutron [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Updating instance_info_cache with network_info: [{"id": "396bcf29-b670-48a2-8e98-fb771af5eb13", "address": "fa:16:3e:1b:40:93", "network": {"id": "4e1058e7-f813-46ae-9371-9bc53e8c24cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1123881374-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c7e14d2fe94e58bb3df92a8841486b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f408ce42-3cac-4d9d-9c05-15471d653a18", "external-id": "nsx-vlan-transportzone-265", "segmentation_id": 265, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap396bcf29-b6", "ovs_interfaceid": "396bcf29-b670-48a2-8e98-fb771af5eb13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.008114] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05bed089-bf72-41a2-806a-211a27f00d1c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.019124] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77246b82-48ae-41d4-8feb-d7b3fe5334d1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.054077] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8298ede-16dd-4156-9e3c-ff09746268c2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.063505] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c71ddce-971d-42aa-abcb-be024cb381a9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.078773] env[68217]: DEBUG nova.compute.provider_tree [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 
0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 705.216651] env[68217]: DEBUG nova.compute.manager [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Received event network-changed-2077ec71-7159-4678-b110-039046651648 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 705.216888] env[68217]: DEBUG nova.compute.manager [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Refreshing instance network info cache due to event network-changed-2077ec71-7159-4678-b110-039046651648. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 705.217628] env[68217]: DEBUG oslo_concurrency.lockutils [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] Acquiring lock "refresh_cache-b5e15801-301a-4ee6-87d2-bbf749967631" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.217806] env[68217]: DEBUG oslo_concurrency.lockutils [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] Acquired lock "refresh_cache-b5e15801-301a-4ee6-87d2-bbf749967631" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 705.218184] env[68217]: DEBUG nova.network.neutron [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Refreshing network info cache for port 2077ec71-7159-4678-b110-039046651648 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 705.415442] env[68217]: DEBUG oslo_concurrency.lockutils [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Releasing lock "refresh_cache-da1524a7-2756-4429-ada2-b1f493544bd2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 705.415774] env[68217]: DEBUG nova.compute.manager [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Instance network_info: |[{"id": "396bcf29-b670-48a2-8e98-fb771af5eb13", "address": "fa:16:3e:1b:40:93", "network": {"id": "4e1058e7-f813-46ae-9371-9bc53e8c24cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1123881374-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c7e14d2fe94e58bb3df92a8841486b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f408ce42-3cac-4d9d-9c05-15471d653a18", "external-id": "nsx-vlan-transportzone-265", "segmentation_id": 265, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap396bcf29-b6", "ovs_interfaceid": "396bcf29-b670-48a2-8e98-fb771af5eb13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 705.418018] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:40:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f408ce42-3cac-4d9d-9c05-15471d653a18', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '396bcf29-b670-48a2-8e98-fb771af5eb13', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 705.428094] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Creating folder: Project (87c7e14d2fe94e58bb3df92a8841486b). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 705.428094] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-65976d12-b61f-4c4d-b025-fb1b53e484bf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.444668] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Created folder: Project (87c7e14d2fe94e58bb3df92a8841486b) in parent group-v594094. [ 705.444867] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Creating folder: Instances. Parent ref: group-v594198. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 705.445156] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-28005147-92fe-4fe3-8c3b-bc957f746361 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.460100] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Created folder: Instances in parent group-v594198. [ 705.460100] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 705.460100] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 705.460100] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9dc4f972-a9e2-4f4a-bbaf-3ddf80209520 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.484282] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 705.484282] env[68217]: value = "task-2960944" [ 705.484282] env[68217]: _type = "Task" [ 705.484282] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 705.498923] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960944, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 705.606345] env[68217]: ERROR nova.scheduler.client.report [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [req-0cee4fc2-a1a2-4fdf-b62e-d137e8fa349f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0cee4fc2-a1a2-4fdf-b62e-d137e8fa349f"}]} [ 705.627817] env[68217]: DEBUG nova.scheduler.client.report [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 705.651861] env[68217]: DEBUG nova.scheduler.client.report [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 705.651861] env[68217]: DEBUG nova.compute.provider_tree [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 705.672509] env[68217]: DEBUG nova.scheduler.client.report [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 705.700705] env[68217]: DEBUG nova.scheduler.client.report [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 705.956229] env[68217]: DEBUG nova.compute.manager [req-a23d0a2f-213b-4653-bd90-a334fc8fee12 req-11c6ac38-5ce2-4e6f-8ce4-db0d0933fa0d service nova] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Received event network-vif-deleted-59d5e487-25d1-47fb-8b16-ebba73a03a4a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 705.956443] env[68217]: INFO nova.compute.manager [req-a23d0a2f-213b-4653-bd90-a334fc8fee12 req-11c6ac38-5ce2-4e6f-8ce4-db0d0933fa0d service nova] [instance: 
83d32dd6-2629-4451-a746-bf5270083e2a] Neutron deleted interface 59d5e487-25d1-47fb-8b16-ebba73a03a4a; detaching it from the instance and deleting it from the info cache [ 705.956618] env[68217]: DEBUG nova.network.neutron [req-a23d0a2f-213b-4653-bd90-a334fc8fee12 req-11c6ac38-5ce2-4e6f-8ce4-db0d0933fa0d service nova] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.003186] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960944, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.010450] env[68217]: DEBUG nova.network.neutron [-] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.085145] env[68217]: DEBUG nova.network.neutron [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Updated VIF entry in instance network info cache for port 2077ec71-7159-4678-b110-039046651648. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 706.085145] env[68217]: DEBUG nova.network.neutron [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Updating instance_info_cache with network_info: [{"id": "2077ec71-7159-4678-b110-039046651648", "address": "fa:16:3e:d6:50:3d", "network": {"id": "b3eaa102-7723-4193-97a7-84c3b6d87de4", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1747363513-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b99296b92df248d684d9e224d27bdcbc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2077ec71-71", "ovs_interfaceid": "2077ec71-7159-4678-b110-039046651648", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.268952] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc1e5f30-67ce-4547-b46f-fd5711381191 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.277694] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-654ead3d-b391-461b-acd8-a86fc8d43b55 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.313761] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c3252e2c-09e8-417b-b1d0-3637746c5be8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.322286] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f84370d-a4c2-48c5-841b-a8328f979608 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.336543] env[68217]: DEBUG nova.compute.provider_tree [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 706.461681] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4f38b2f1-0b20-4feb-ab86-4ddaf9e5b2a5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.472327] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81d67fac-d83f-4c76-a99d-e1ce39037905 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.500508] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960944, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 706.512020] env[68217]: DEBUG nova.compute.manager [req-a23d0a2f-213b-4653-bd90-a334fc8fee12 req-11c6ac38-5ce2-4e6f-8ce4-db0d0933fa0d service nova] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Detach interface failed, port_id=59d5e487-25d1-47fb-8b16-ebba73a03a4a, reason: Instance 83d32dd6-2629-4451-a746-bf5270083e2a could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 706.513862] env[68217]: INFO nova.compute.manager [-] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Took 1.83 seconds to deallocate network for instance. 
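[editor's note] The 409 logged above ("placement.concurrent_update ... resource provider generation conflict") is Placement's optimistic-concurrency check: every inventory PUT must carry the provider generation the caller last saw, and the request is rejected if another writer bumped it first. The subsequent "Refreshing inventories/aggregate associations/trait associations" lines show the report client re-reading the provider before retrying, which succeeds later in this log with the generation moving from 59 to 60. Below is a minimal, hypothetical sketch of that retry-on-generation-conflict pattern against the Placement HTTP API — it is not Nova's SchedulerReportClient; PLACEMENT_URL, TOKEN, and the function name are illustrative assumptions.

```python
# Minimal sketch (not Nova's report client) of retrying an inventory PUT when
# Placement rejects it with a resource provider generation conflict (HTTP 409,
# error code "placement.concurrent_update").
import requests

PLACEMENT_URL = "http://placement.example/placement"   # hypothetical endpoint
TOKEN = "gAAAA..."                                      # hypothetical auth token
HEADERS = {"X-Auth-Token": TOKEN,
           "OpenStack-API-Version": "placement 1.26"}   # assumed microversion


def put_inventories(rp_uuid, inventories, max_attempts=3):
    """PUT the full inventory map, refreshing the provider generation on 409."""
    for _ in range(max_attempts):
        # Read the provider's current generation; it changes on every update.
        rp = requests.get(f"{PLACEMENT_URL}/resource_providers/{rp_uuid}",
                          headers=HEADERS)
        rp.raise_for_status()
        payload = {
            "resource_provider_generation": rp.json()["generation"],
            "inventories": inventories,
        }
        resp = requests.put(
            f"{PLACEMENT_URL}/resource_providers/{rp_uuid}/inventories",
            json=payload, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()  # body carries the new provider generation
        # 409 placement.concurrent_update: another writer bumped the generation
        # between our GET and PUT; loop and retry with the fresh value.
    raise RuntimeError(f"generation conflict persisted for provider {rp_uuid}")
```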
[ 706.597027] env[68217]: DEBUG oslo_concurrency.lockutils [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] Releasing lock "refresh_cache-b5e15801-301a-4ee6-87d2-bbf749967631" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 706.597027] env[68217]: DEBUG nova.compute.manager [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Received event network-vif-plugged-396bcf29-b670-48a2-8e98-fb771af5eb13 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 706.597027] env[68217]: DEBUG oslo_concurrency.lockutils [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] Acquiring lock "da1524a7-2756-4429-ada2-b1f493544bd2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 706.597027] env[68217]: DEBUG oslo_concurrency.lockutils [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] Lock "da1524a7-2756-4429-ada2-b1f493544bd2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 706.597027] env[68217]: DEBUG oslo_concurrency.lockutils [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] Lock "da1524a7-2756-4429-ada2-b1f493544bd2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 706.597501] env[68217]: DEBUG nova.compute.manager [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] No waiting events found dispatching network-vif-plugged-396bcf29-b670-48a2-8e98-fb771af5eb13 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 706.597501] env[68217]: WARNING nova.compute.manager [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Received unexpected event network-vif-plugged-396bcf29-b670-48a2-8e98-fb771af5eb13 for instance with vm_state building and task_state spawning. [ 706.597501] env[68217]: DEBUG nova.compute.manager [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Received event network-changed-396bcf29-b670-48a2-8e98-fb771af5eb13 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 706.597501] env[68217]: DEBUG nova.compute.manager [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Refreshing instance network info cache due to event network-changed-396bcf29-b670-48a2-8e98-fb771af5eb13. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 706.597501] env[68217]: DEBUG oslo_concurrency.lockutils [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] Acquiring lock "refresh_cache-da1524a7-2756-4429-ada2-b1f493544bd2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.597703] env[68217]: DEBUG oslo_concurrency.lockutils [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] Acquired lock "refresh_cache-da1524a7-2756-4429-ada2-b1f493544bd2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 706.597703] env[68217]: DEBUG nova.network.neutron [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Refreshing network info cache for port 396bcf29-b670-48a2-8e98-fb771af5eb13 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 706.880826] env[68217]: DEBUG nova.scheduler.client.report [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 59 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 706.881115] env[68217]: DEBUG nova.compute.provider_tree [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 59 to 60 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 706.881286] env[68217]: DEBUG nova.compute.provider_tree [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 707.004395] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960944, 'name': CreateVM_Task, 'duration_secs': 1.496532} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.004395] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 707.004395] env[68217]: DEBUG oslo_concurrency.lockutils [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.004395] env[68217]: DEBUG oslo_concurrency.lockutils [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.004395] env[68217]: DEBUG oslo_concurrency.lockutils [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 707.004611] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dffb0c29-ac45-4fdc-b0ea-b809fefe8cea {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.010594] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 707.010594] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5240b845-ea07-add8-066e-88711b50d13d" [ 707.010594] env[68217]: _type = "Task" [ 707.010594] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.019246] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5240b845-ea07-add8-066e-88711b50d13d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.022230] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 707.298290] env[68217]: DEBUG nova.network.neutron [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Updated VIF entry in instance network info cache for port 396bcf29-b670-48a2-8e98-fb771af5eb13. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 707.298661] env[68217]: DEBUG nova.network.neutron [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Updating instance_info_cache with network_info: [{"id": "396bcf29-b670-48a2-8e98-fb771af5eb13", "address": "fa:16:3e:1b:40:93", "network": {"id": "4e1058e7-f813-46ae-9371-9bc53e8c24cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1123881374-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c7e14d2fe94e58bb3df92a8841486b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f408ce42-3cac-4d9d-9c05-15471d653a18", "external-id": "nsx-vlan-transportzone-265", "segmentation_id": 265, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap396bcf29-b6", "ovs_interfaceid": "396bcf29-b670-48a2-8e98-fb771af5eb13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.336761] env[68217]: DEBUG nova.compute.manager [req-e9b5b17a-6634-48aa-8bf6-390441655aa9 req-9ace4583-cb55-4a18-8f1b-bfa9e24b2ec0 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Received event network-changed-e1840648-4f87-4974-bc63-bd2b25acab29 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 707.337142] env[68217]: DEBUG nova.compute.manager [req-e9b5b17a-6634-48aa-8bf6-390441655aa9 req-9ace4583-cb55-4a18-8f1b-bfa9e24b2ec0 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Refreshing instance network info cache due to event network-changed-e1840648-4f87-4974-bc63-bd2b25acab29. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 707.337724] env[68217]: DEBUG oslo_concurrency.lockutils [req-e9b5b17a-6634-48aa-8bf6-390441655aa9 req-9ace4583-cb55-4a18-8f1b-bfa9e24b2ec0 service nova] Acquiring lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.338031] env[68217]: DEBUG oslo_concurrency.lockutils [req-e9b5b17a-6634-48aa-8bf6-390441655aa9 req-9ace4583-cb55-4a18-8f1b-bfa9e24b2ec0 service nova] Acquired lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.340497] env[68217]: DEBUG nova.network.neutron [req-e9b5b17a-6634-48aa-8bf6-390441655aa9 req-9ace4583-cb55-4a18-8f1b-bfa9e24b2ec0 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Refreshing network info cache for port e1840648-4f87-4974-bc63-bd2b25acab29 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 707.387565] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.933s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 707.388951] env[68217]: DEBUG nova.compute.manager [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 707.391864] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.321s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 707.392340] env[68217]: DEBUG nova.objects.instance [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Lazy-loading 'resources' on Instance uuid 6113feaf-5c21-49c3-9c19-ea10b60786d3 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 707.523212] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5240b845-ea07-add8-066e-88711b50d13d, 'name': SearchDatastore_Task, 'duration_secs': 0.010402} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 707.523986] env[68217]: DEBUG oslo_concurrency.lockutils [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.523986] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 707.523986] env[68217]: DEBUG oslo_concurrency.lockutils [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 707.523986] env[68217]: DEBUG oslo_concurrency.lockutils [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 707.524152] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 707.524421] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ff814c6-1065-47f0-883e-a4f8a0fe152e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.534807] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 707.535161] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 707.535934] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cb6c4cee-1afa-4512-a96c-52fb8f2a8ebe {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.541905] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 707.541905] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5275a927-cf5d-5aaf-0817-ce7e39ce41f1" [ 707.541905] env[68217]: _type = "Task" [ 707.541905] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.550507] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5275a927-cf5d-5aaf-0817-ce7e39ce41f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 707.802910] env[68217]: DEBUG oslo_concurrency.lockutils [req-03874e10-1932-40ed-8c16-2f74c37477a3 req-b67dccf6-f87c-477a-87f7-f22b4cf82d16 service nova] Releasing lock "refresh_cache-da1524a7-2756-4429-ada2-b1f493544bd2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 707.897733] env[68217]: DEBUG nova.compute.utils [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 707.906188] env[68217]: DEBUG nova.compute.manager [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 707.906188] env[68217]: DEBUG nova.network.neutron [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 707.969787] env[68217]: DEBUG nova.policy [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36276be4c67c4abfa0941293d4cc800b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ebfeb38b81794c558c1164cecd7fa221', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 708.057031] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5275a927-cf5d-5aaf-0817-ce7e39ce41f1, 'name': SearchDatastore_Task, 'duration_secs': 0.008612} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.062592] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-137a3083-da9d-47d4-9613-f16878c9e460 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.072377] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 708.072377] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522fd71d-fc42-083d-3454-58a177c9e276" [ 708.072377] env[68217]: _type = "Task" [ 708.072377] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.082641] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522fd71d-fc42-083d-3454-58a177c9e276, 'name': SearchDatastore_Task, 'duration_secs': 0.010882} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.085913] env[68217]: DEBUG oslo_concurrency.lockutils [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 708.086244] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] da1524a7-2756-4429-ada2-b1f493544bd2/da1524a7-2756-4429-ada2-b1f493544bd2.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 708.086651] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7caf02fc-5d20-4517-9e24-12a1c4419ef6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.096209] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 708.096209] env[68217]: value = "task-2960945" [ 708.096209] env[68217]: _type = "Task" [ 708.096209] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.110908] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2960945, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.234185] env[68217]: DEBUG nova.network.neutron [req-e9b5b17a-6634-48aa-8bf6-390441655aa9 req-9ace4583-cb55-4a18-8f1b-bfa9e24b2ec0 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Updated VIF entry in instance network info cache for port e1840648-4f87-4974-bc63-bd2b25acab29. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 708.234185] env[68217]: DEBUG nova.network.neutron [req-e9b5b17a-6634-48aa-8bf6-390441655aa9 req-9ace4583-cb55-4a18-8f1b-bfa9e24b2ec0 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Updating instance_info_cache with network_info: [{"id": "e1840648-4f87-4974-bc63-bd2b25acab29", "address": "fa:16:3e:d3:ca:e4", "network": {"id": "72cf5b88-0408-457a-9587-2b78b9e3bafb", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1678018221-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6cd1165637a44528a61171aef40a553", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1840648-4f", "ovs_interfaceid": "e1840648-4f87-4974-bc63-bd2b25acab29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.404251] env[68217]: DEBUG nova.compute.manager [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 708.491874] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e87c32f-b8b1-4b63-a011-3bc2bd0d130d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.503285] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10e515e9-eaff-4793-8ac8-e7f67f4a8fbb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.543333] env[68217]: DEBUG nova.network.neutron [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Successfully created port: 1f12aa8d-6df1-467f-b0b7-9994d13446cb {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 708.545474] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e642ef4-6c61-4a06-9a5a-e0a3fdda7633 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.555496] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5046a51b-ad4f-421b-ab8c-5de5db5c2441 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.572818] env[68217]: DEBUG nova.compute.provider_tree [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 708.605539] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2960945, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.734490] env[68217]: DEBUG oslo_concurrency.lockutils [req-e9b5b17a-6634-48aa-8bf6-390441655aa9 req-9ace4583-cb55-4a18-8f1b-bfa9e24b2ec0 service nova] Releasing lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 709.076584] env[68217]: DEBUG nova.scheduler.client.report [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 709.111411] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2960945, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.535355} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.111411] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] da1524a7-2756-4429-ada2-b1f493544bd2/da1524a7-2756-4429-ada2-b1f493544bd2.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 709.111411] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 709.111411] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1ded3dc0-b7fc-4424-902e-cd20ff6adb6d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.120233] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 709.120233] env[68217]: value = "task-2960946" [ 709.120233] env[68217]: _type = "Task" [ 709.120233] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.126342] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2960946, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 709.417875] env[68217]: DEBUG nova.compute.manager [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 709.450407] env[68217]: DEBUG nova.virt.hardware [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 709.450737] env[68217]: DEBUG nova.virt.hardware [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 709.453032] env[68217]: DEBUG nova.virt.hardware [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 709.453335] env[68217]: DEBUG nova.virt.hardware [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 709.453499] env[68217]: DEBUG nova.virt.hardware [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 709.453652] env[68217]: DEBUG nova.virt.hardware [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 709.453866] env[68217]: DEBUG nova.virt.hardware [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 709.454041] env[68217]: DEBUG nova.virt.hardware [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 
tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 709.454215] env[68217]: DEBUG nova.virt.hardware [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 709.454378] env[68217]: DEBUG nova.virt.hardware [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 709.454547] env[68217]: DEBUG nova.virt.hardware [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 709.455454] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf7ad5eb-b5cb-42b3-ac0f-d7a6d0c1124a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.464280] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1431ee88-b7da-4ef5-8368-9160028778d5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.583824] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.192s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 709.588764] env[68217]: DEBUG oslo_concurrency.lockutils [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.697s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.589048] env[68217]: DEBUG nova.objects.instance [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Lazy-loading 'resources' on Instance uuid 17bea068-7d7a-4a87-8b27-91a7efcd45c5 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 709.612024] env[68217]: INFO nova.scheduler.client.report [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Deleted allocations for instance 6113feaf-5c21-49c3-9c19-ea10b60786d3 [ 709.633694] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2960946, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073906} completed successfully. 
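The "waited 32.697s" and "held 2.192s" figures above come from oslo.concurrency's wrapper around the resource tracker's compute_resources lock: it times how long a caller waited to acquire the named lock and how long it was held. A rough illustration of where those two numbers come from, using the real lockutils.lock() context manager but with simplified logging:

```python
import time

from oslo_concurrency import lockutils


def update_usage_with_timing():
    """Illustrative only: reproduce the 'waited N s' / 'held N s' log figures."""
    start = time.monotonic()
    with lockutils.lock("compute_resources"):
        waited = time.monotonic() - start
        print('Lock "compute_resources" acquired :: waited %.3fs' % waited)
        held_start = time.monotonic()
        # ... resource tracker bookkeeping would happen here ...
        held = time.monotonic() - held_start
    print('Lock "compute_resources" released :: held %.3fs' % held)
```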
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 709.633694] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 709.633694] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8083011-9908-485e-a2a2-993932a46dff {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.666728] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Reconfiguring VM instance instance-00000024 to attach disk [datastore2] da1524a7-2756-4429-ada2-b1f493544bd2/da1524a7-2756-4429-ada2-b1f493544bd2.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 709.667784] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-79f99a63-ebb6-499b-907d-5158690fcc3d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.689165] env[68217]: DEBUG oslo_concurrency.lockutils [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquiring lock "693d6a74-a671-4d02-8798-cd3975507428" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 709.689469] env[68217]: DEBUG oslo_concurrency.lockutils [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "693d6a74-a671-4d02-8798-cd3975507428" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 709.689742] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 709.689742] env[68217]: value = "task-2960947" [ 709.689742] env[68217]: _type = "Task" [ 709.689742] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 709.698880] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2960947, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.126912] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4ad91611-d93a-4c83-892b-62cb3b1be580 tempest-ServersAaction247Test-901864586 tempest-ServersAaction247Test-901864586-project-member] Lock "6113feaf-5c21-49c3-9c19-ea10b60786d3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.360s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.211381] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2960947, 'name': ReconfigVM_Task, 'duration_secs': 0.316309} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.212078] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Reconfigured VM instance instance-00000024 to attach disk [datastore2] da1524a7-2756-4429-ada2-b1f493544bd2/da1524a7-2756-4429-ada2-b1f493544bd2.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 710.212819] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-61094c1d-5bd3-48d7-8453-8d0f2db7d4c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.222271] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 710.222271] env[68217]: value = "task-2960948" [ 710.222271] env[68217]: _type = "Task" [ 710.222271] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.228810] env[68217]: DEBUG nova.compute.manager [req-b4d512be-49ca-4176-8431-b1e3f47c9dcd req-8f6122ee-0815-4ba5-acff-bfc9a188596f service nova] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Received event network-vif-plugged-1f12aa8d-6df1-467f-b0b7-9994d13446cb {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 710.228810] env[68217]: DEBUG oslo_concurrency.lockutils [req-b4d512be-49ca-4176-8431-b1e3f47c9dcd req-8f6122ee-0815-4ba5-acff-bfc9a188596f service nova] Acquiring lock "9d2b3670-ef8a-477a-b876-7a8fe37fa065-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 710.228996] env[68217]: DEBUG oslo_concurrency.lockutils [req-b4d512be-49ca-4176-8431-b1e3f47c9dcd req-8f6122ee-0815-4ba5-acff-bfc9a188596f service nova] Lock "9d2b3670-ef8a-477a-b876-7a8fe37fa065-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 710.229053] env[68217]: DEBUG oslo_concurrency.lockutils [req-b4d512be-49ca-4176-8431-b1e3f47c9dcd req-8f6122ee-0815-4ba5-acff-bfc9a188596f service nova] Lock "9d2b3670-ef8a-477a-b876-7a8fe37fa065-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 710.229243] env[68217]: DEBUG nova.compute.manager [req-b4d512be-49ca-4176-8431-b1e3f47c9dcd req-8f6122ee-0815-4ba5-acff-bfc9a188596f service nova] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] No waiting events found dispatching network-vif-plugged-1f12aa8d-6df1-467f-b0b7-9994d13446cb {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 710.229361] env[68217]: WARNING nova.compute.manager [req-b4d512be-49ca-4176-8431-b1e3f47c9dcd req-8f6122ee-0815-4ba5-acff-bfc9a188596f service nova] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Received unexpected event network-vif-plugged-1f12aa8d-6df1-467f-b0b7-9994d13446cb for instance with vm_state building and task_state spawning. [ 710.239800] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2960948, 'name': Rename_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.451201] env[68217]: DEBUG nova.network.neutron [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Successfully updated port: 1f12aa8d-6df1-467f-b0b7-9994d13446cb {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 710.736403] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2960948, 'name': Rename_Task, 'duration_secs': 0.368486} completed successfully. 
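The network-vif-plugged sequence above (acquire the per-instance "-events" lock, look for a registered waiter, then warn about an unexpected event) reflects how the compute manager pairs Neutron notifications with threads that asked to wait for them; here the spawning thread had not registered yet, so the event is dropped with the WARNING. A toy version of that registry, not Nova's actual InstanceEvents class:

```python
import threading
from collections import defaultdict


class InstanceEvents:
    """Sketch of a per-instance event registry (illustrative, not Nova's code)."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)   # instance_uuid -> {event_name: Event}

    def prepare(self, instance_uuid, event_name):
        """Called by the spawning thread before it starts waiting."""
        ev = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = ev
        return ev

    def pop(self, instance_uuid, event_name):
        """Called when an external event arrives from Neutron."""
        with self._lock:
            return self._waiters[instance_uuid].pop(event_name, None)


registry = InstanceEvents()


def handle_external_event(instance_uuid, event_name):
    waiter = registry.pop(instance_uuid, event_name)
    if waiter is None:
        # No one prepared for this event yet: same situation as the WARNING above.
        print("Received unexpected event %s for instance %s" % (event_name, instance_uuid))
    else:
        waiter.set()
```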
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 710.736753] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 710.737295] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b6f8d834-9124-404b-82da-ef6e992bc206 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.740922] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c528fc4a-4e67-4ea5-b5fd-63e30036b562 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.745203] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 710.745203] env[68217]: value = "task-2960949" [ 710.745203] env[68217]: _type = "Task" [ 710.745203] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 710.752853] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd6aea5-f685-4e10-ae06-606b6cd7308d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.758420] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2960949, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 710.787226] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-529e1e20-b027-401f-9b57-dca6ad724594 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.796118] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14c1c25c-1402-4d2b-9f22-755600b8a889 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.811372] env[68217]: DEBUG nova.compute.provider_tree [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.955306] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "refresh_cache-9d2b3670-ef8a-477a-b876-7a8fe37fa065" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 710.955306] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired lock "refresh_cache-9d2b3670-ef8a-477a-b876-7a8fe37fa065" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 710.955461] env[68217]: DEBUG nova.network.neutron [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 711.258846] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2960949, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 711.314682] env[68217]: DEBUG nova.scheduler.client.report [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 711.518999] env[68217]: DEBUG nova.network.neutron [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Instance cache missing network info. 
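The inventory payloads reported to Placement above combine total, reserved and allocation_ratio; the scheduler treats (total - reserved) * allocation_ratio as the schedulable capacity for each resource class. A worked example with the exact numbers from this provider:

```python
# Effective capacity as Placement computes it:
#   capacity = (total - reserved) * allocation_ratio
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
    print(f"{rc}: {capacity:g} schedulable")
# VCPU: 192 schedulable, MEMORY_MB: 196078 schedulable, DISK_GB: 400 schedulable
```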
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 711.758667] env[68217]: DEBUG oslo_vmware.api [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2960949, 'name': PowerOnVM_Task, 'duration_secs': 0.648535} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 711.758667] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 711.758667] env[68217]: INFO nova.compute.manager [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Took 9.48 seconds to spawn the instance on the hypervisor. [ 711.758667] env[68217]: DEBUG nova.compute.manager [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 711.759419] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27633f21-ccaf-4be6-af59-68457097d48b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.813070] env[68217]: DEBUG nova.network.neutron [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Updating instance_info_cache with network_info: [{"id": "1f12aa8d-6df1-467f-b0b7-9994d13446cb", "address": "fa:16:3e:ac:7d:80", "network": {"id": "9e8fc690-e99b-4d81-a587-00584139fd8d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1676314619-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebfeb38b81794c558c1164cecd7fa221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f12aa8d-6d", "ovs_interfaceid": "1f12aa8d-6df1-467f-b0b7-9994d13446cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.820682] env[68217]: DEBUG oslo_concurrency.lockutils [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Lock "compute_resources" 
"released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.232s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 711.824023] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 34.071s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 711.827147] env[68217]: DEBUG nova.objects.instance [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lazy-loading 'resources' on Instance uuid 63e0fc9e-5182-4781-b007-69e2134718df {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 711.854872] env[68217]: INFO nova.scheduler.client.report [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Deleted allocations for instance 17bea068-7d7a-4a87-8b27-91a7efcd45c5 [ 712.275428] env[68217]: INFO nova.compute.manager [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Took 43.59 seconds to build instance. [ 712.315546] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Releasing lock "refresh_cache-9d2b3670-ef8a-477a-b876-7a8fe37fa065" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 712.315884] env[68217]: DEBUG nova.compute.manager [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Instance network_info: |[{"id": "1f12aa8d-6df1-467f-b0b7-9994d13446cb", "address": "fa:16:3e:ac:7d:80", "network": {"id": "9e8fc690-e99b-4d81-a587-00584139fd8d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1676314619-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebfeb38b81794c558c1164cecd7fa221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f12aa8d-6d", "ovs_interfaceid": "1f12aa8d-6df1-467f-b0b7-9994d13446cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 712.316306] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 
tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ac:7d:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5a629f-6902-4d30-9278-74b443a8371d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1f12aa8d-6df1-467f-b0b7-9994d13446cb', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 712.324309] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Creating folder: Project (ebfeb38b81794c558c1164cecd7fa221). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 712.324799] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5acd81ef-aa61-4ed0-baa5-b4b998c82e70 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.340389] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Created folder: Project (ebfeb38b81794c558c1164cecd7fa221) in parent group-v594094. [ 712.340389] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Creating folder: Instances. Parent ref: group-v594201. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 712.340389] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-176529bc-0c5c-4d7d-bbd7-8ab8c4e78e4f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.349374] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Created folder: Instances in parent group-v594201. [ 712.349605] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 712.349796] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 712.349995] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-03e2d1f0-8d6a-4e45-ba32-126c0016cf04 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.373090] env[68217]: DEBUG nova.compute.manager [req-b55d752c-3b6f-4e71-b8ae-6937afe70103 req-4a72f86d-bdaa-4de6-a55f-ad9b3eab9a59 service nova] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Received event network-changed-1f12aa8d-6df1-467f-b0b7-9994d13446cb {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 712.373293] env[68217]: DEBUG nova.compute.manager [req-b55d752c-3b6f-4e71-b8ae-6937afe70103 req-4a72f86d-bdaa-4de6-a55f-ad9b3eab9a59 service nova] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Refreshing instance network info cache due to event network-changed-1f12aa8d-6df1-467f-b0b7-9994d13446cb. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 712.373513] env[68217]: DEBUG oslo_concurrency.lockutils [req-b55d752c-3b6f-4e71-b8ae-6937afe70103 req-4a72f86d-bdaa-4de6-a55f-ad9b3eab9a59 service nova] Acquiring lock "refresh_cache-9d2b3670-ef8a-477a-b876-7a8fe37fa065" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.373651] env[68217]: DEBUG oslo_concurrency.lockutils [req-b55d752c-3b6f-4e71-b8ae-6937afe70103 req-4a72f86d-bdaa-4de6-a55f-ad9b3eab9a59 service nova] Acquired lock "refresh_cache-9d2b3670-ef8a-477a-b876-7a8fe37fa065" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 712.373803] env[68217]: DEBUG nova.network.neutron [req-b55d752c-3b6f-4e71-b8ae-6937afe70103 req-4a72f86d-bdaa-4de6-a55f-ad9b3eab9a59 service nova] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Refreshing network info cache for port 1f12aa8d-6df1-467f-b0b7-9994d13446cb {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 712.378209] env[68217]: DEBUG oslo_concurrency.lockutils [None req-abe87f37-169e-41d7-b13d-82ad4439b483 tempest-ServersAdmin275Test-438001033 tempest-ServersAdmin275Test-438001033-project-member] Lock "17bea068-7d7a-4a87-8b27-91a7efcd45c5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 39.722s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.386331] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 712.386331] env[68217]: value = "task-2960952" [ 712.386331] env[68217]: _type = "Task" [ 712.386331] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.398078] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960952, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.778114] env[68217]: DEBUG oslo_concurrency.lockutils [None req-104f8c89-3917-4c85-b5e9-a9ed7bc31a65 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "da1524a7-2756-4429-ada2-b1f493544bd2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 91.362s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 712.839518] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-763d3a86-5837-4bce-9061-b1851393fc5c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.847810] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f97b03e-5d87-4dae-9980-61ffbe6c7fe0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.879161] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a3c4e64-8a7c-4d8c-939f-b3c4b0b8b762 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.892087] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746ecc40-fba6-4580-adc1-af092018f844 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.900092] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960952, 'name': CreateVM_Task, 'duration_secs': 0.354699} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.908229] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 712.908582] env[68217]: DEBUG nova.compute.provider_tree [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.914018] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.914018] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 712.914018] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 712.914018] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30f788cd-0b4d-4870-82e4-46dd8af7cdea {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.915556] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 712.915556] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5228f953-e82d-57fb-9b0c-ef8bd5fa7811" [ 712.915556] env[68217]: _type = "Task" [ 712.915556] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.924421] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5228f953-e82d-57fb-9b0c-ef8bd5fa7811, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.102051] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Acquiring lock "db4cf157-9511-423c-aa41-433af8d92b48" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.102331] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Lock "db4cf157-9511-423c-aa41-433af8d92b48" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.102544] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Acquiring lock "db4cf157-9511-423c-aa41-433af8d92b48-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.102724] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Lock "db4cf157-9511-423c-aa41-433af8d92b48-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.102890] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Lock "db4cf157-9511-423c-aa41-433af8d92b48-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.105510] env[68217]: INFO nova.compute.manager [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Terminating instance [ 713.280594] env[68217]: DEBUG nova.compute.manager [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 713.383029] env[68217]: DEBUG nova.network.neutron [req-b55d752c-3b6f-4e71-b8ae-6937afe70103 req-4a72f86d-bdaa-4de6-a55f-ad9b3eab9a59 service nova] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Updated VIF entry in instance network info cache for port 1f12aa8d-6df1-467f-b0b7-9994d13446cb. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 713.383735] env[68217]: DEBUG nova.network.neutron [req-b55d752c-3b6f-4e71-b8ae-6937afe70103 req-4a72f86d-bdaa-4de6-a55f-ad9b3eab9a59 service nova] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Updating instance_info_cache with network_info: [{"id": "1f12aa8d-6df1-467f-b0b7-9994d13446cb", "address": "fa:16:3e:ac:7d:80", "network": {"id": "9e8fc690-e99b-4d81-a587-00584139fd8d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1676314619-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebfeb38b81794c558c1164cecd7fa221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f12aa8d-6d", "ovs_interfaceid": "1f12aa8d-6df1-467f-b0b7-9994d13446cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.412082] env[68217]: DEBUG nova.scheduler.client.report [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 713.427169] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5228f953-e82d-57fb-9b0c-ef8bd5fa7811, 'name': SearchDatastore_Task, 'duration_secs': 0.009974} completed successfully. 
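The network_info blob cached above is a list of VIF dicts; the MAC address, fixed IP and tap device name used later in the spawn all come from it. A small sketch of pulling those fields out of one cached entry, using plain dicts rather than Nova's NetworkInfo model and abbreviating the entry to the fields actually read:

```python
import json

# One VIF entry as cached above, trimmed to the fields used here.
network_info = json.loads("""
[{"id": "1f12aa8d-6df1-467f-b0b7-9994d13446cb",
  "address": "fa:16:3e:ac:7d:80",
  "devname": "tap1f12aa8d-6d",
  "network": {"subnets": [{"cidr": "192.168.128.0/28",
                           "ips": [{"address": "192.168.128.10", "type": "fixed"}]}]}}]
""")

for vif in network_info:
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"] if ip["type"] == "fixed"]
    print(vif["id"], vif["address"], vif["devname"], fixed_ips)
# 1f12aa8d-... fa:16:3e:ac:7d:80 tap1f12aa8d-6d ['192.168.128.10']
```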
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.427169] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 713.427169] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 713.427484] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.427561] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 713.427731] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 713.428323] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7109492-8cd6-45a9-bfbe-3b6ab144f1b4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.436551] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 713.436731] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Folder [datastore1] devstack-image-cache_base created. 
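Around this point the spawn checks the per-datastore image cache before copying the disk: the cached VMDK lives at [datastore] devstack-image-cache_base/<image-id>/<image-id>.vmdk (the directory name seen in this run), and the instance's root disk is then copied to [datastore] <instance-uuid>/<instance-uuid>.vmdk. A sketch of how those two datastore paths are laid out, with helper names chosen here purely for illustration:

```python
def cached_image_path(datastore, image_id):
    # e.g. "[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk"
    return "[%s] devstack-image-cache_base/%s/%s.vmdk" % (datastore, image_id, image_id)


def instance_disk_path(datastore, instance_uuid):
    # e.g. "[datastore1] <instance-uuid>/<instance-uuid>.vmdk"
    return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)


src = cached_image_path("datastore1", "575ba628-84b6-4b0c-98ba-305166627d10")
dst = instance_disk_path("datastore1", "9d2b3670-ef8a-477a-b876-7a8fe37fa065")
print("CopyVirtualDisk:", src, "->", dst)
```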
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 713.437488] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-285fabf6-deaf-4b07-b23d-e78505288d5e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.443018] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 713.443018] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]528610a8-d6d4-5657-6b95-6bd0cca01218" [ 713.443018] env[68217]: _type = "Task" [ 713.443018] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.451978] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528610a8-d6d4-5657-6b95-6bd0cca01218, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.609398] env[68217]: DEBUG nova.compute.manager [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 713.609398] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 713.610612] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2194682f-a0f0-4c09-bea5-52cdd380b492 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.618243] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 713.618487] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7f89def-f25b-4b24-b1a8-fc9117c21691 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.626539] env[68217]: DEBUG oslo_vmware.api [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Waiting for the task: (returnval){ [ 713.626539] env[68217]: value = "task-2960953" [ 713.626539] env[68217]: _type = "Task" [ 713.626539] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.637646] env[68217]: DEBUG oslo_vmware.api [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': task-2960953, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 713.805205] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 713.886029] env[68217]: DEBUG oslo_concurrency.lockutils [req-b55d752c-3b6f-4e71-b8ae-6937afe70103 req-4a72f86d-bdaa-4de6-a55f-ad9b3eab9a59 service nova] Releasing lock "refresh_cache-9d2b3670-ef8a-477a-b876-7a8fe37fa065" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 713.917347] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.093s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 713.920037] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 35.668s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 713.921123] env[68217]: INFO nova.compute.claims [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 713.941327] env[68217]: INFO nova.scheduler.client.report [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Deleted allocations for instance 63e0fc9e-5182-4781-b007-69e2134718df [ 713.963548] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528610a8-d6d4-5657-6b95-6bd0cca01218, 'name': SearchDatastore_Task, 'duration_secs': 0.020367} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 713.964376] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33d19dfc-1b28-443a-8326-69ec7a0c5f90 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.970886] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 713.970886] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5224180b-483a-638a-d34a-237a269cc256" [ 713.970886] env[68217]: _type = "Task" [ 713.970886] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 713.981641] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5224180b-483a-638a-d34a-237a269cc256, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.137381] env[68217]: DEBUG oslo_vmware.api [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': task-2960953, 'name': PowerOffVM_Task, 'duration_secs': 0.25142} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.137665] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 714.137863] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 714.138222] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-06240bc3-3b53-429a-8270-abb11183c3e4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.205882] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 714.206121] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 714.206317] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 
tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Deleting the datastore file [datastore1] db4cf157-9511-423c-aa41-433af8d92b48 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 714.206645] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-81d08947-096c-40e6-a8eb-9d9e38e512a5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.213288] env[68217]: DEBUG oslo_vmware.api [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Waiting for the task: (returnval){ [ 714.213288] env[68217]: value = "task-2960955" [ 714.213288] env[68217]: _type = "Task" [ 714.213288] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.224544] env[68217]: DEBUG oslo_vmware.api [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': task-2960955, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.460565] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2217e3-de51-42d7-baf0-189fcdb91e76 tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "63e0fc9e-5182-4781-b007-69e2134718df" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 40.126s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 714.482035] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5224180b-483a-638a-d34a-237a269cc256, 'name': SearchDatastore_Task, 'duration_secs': 0.00975} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.483684] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 714.483950] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 9d2b3670-ef8a-477a-b876-7a8fe37fa065/9d2b3670-ef8a-477a-b876-7a8fe37fa065.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 714.485630] env[68217]: DEBUG nova.compute.manager [req-7121f520-9d4b-4cbc-a605-c3a4c2a28d2a req-2de958e8-0394-4cff-9688-3b79e57f742e service nova] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Received event network-changed-396bcf29-b670-48a2-8e98-fb771af5eb13 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 714.485817] env[68217]: DEBUG nova.compute.manager [req-7121f520-9d4b-4cbc-a605-c3a4c2a28d2a req-2de958e8-0394-4cff-9688-3b79e57f742e service nova] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Refreshing instance network info cache due to event network-changed-396bcf29-b670-48a2-8e98-fb771af5eb13. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 714.486033] env[68217]: DEBUG oslo_concurrency.lockutils [req-7121f520-9d4b-4cbc-a605-c3a4c2a28d2a req-2de958e8-0394-4cff-9688-3b79e57f742e service nova] Acquiring lock "refresh_cache-da1524a7-2756-4429-ada2-b1f493544bd2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.486179] env[68217]: DEBUG oslo_concurrency.lockutils [req-7121f520-9d4b-4cbc-a605-c3a4c2a28d2a req-2de958e8-0394-4cff-9688-3b79e57f742e service nova] Acquired lock "refresh_cache-da1524a7-2756-4429-ada2-b1f493544bd2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 714.486340] env[68217]: DEBUG nova.network.neutron [req-7121f520-9d4b-4cbc-a605-c3a4c2a28d2a req-2de958e8-0394-4cff-9688-3b79e57f742e service nova] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Refreshing network info cache for port 396bcf29-b670-48a2-8e98-fb771af5eb13 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 714.488549] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1abe457d-9cbe-4cd5-8a08-82cc91573571 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.501099] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 714.501099] env[68217]: value = "task-2960956" [ 714.501099] env[68217]: _type = "Task" [ 714.501099] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 714.513026] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960956, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 714.730437] env[68217]: DEBUG oslo_vmware.api [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Task: {'id': task-2960955, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145185} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 714.730765] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 714.730974] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 714.731283] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 714.731603] env[68217]: INFO nova.compute.manager [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Took 1.12 seconds to destroy the instance on the hypervisor. [ 714.731999] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 714.732355] env[68217]: DEBUG nova.compute.manager [-] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 714.732535] env[68217]: DEBUG nova.network.neutron [-] [instance: db4cf157-9511-423c-aa41-433af8d92b48] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 715.012531] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960956, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.491636} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.015451] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 9d2b3670-ef8a-477a-b876-7a8fe37fa065/9d2b3670-ef8a-477a-b876-7a8fe37fa065.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 715.015754] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 715.016507] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0aec308-c9cb-475d-844d-52651dc82434 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.023231] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 715.023231] env[68217]: value = "task-2960957" [ 715.023231] env[68217]: _type = "Task" [ 715.023231] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.036305] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960957, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.329826] env[68217]: DEBUG nova.network.neutron [req-7121f520-9d4b-4cbc-a605-c3a4c2a28d2a req-2de958e8-0394-4cff-9688-3b79e57f742e service nova] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Updated VIF entry in instance network info cache for port 396bcf29-b670-48a2-8e98-fb771af5eb13. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 715.330278] env[68217]: DEBUG nova.network.neutron [req-7121f520-9d4b-4cbc-a605-c3a4c2a28d2a req-2de958e8-0394-4cff-9688-3b79e57f742e service nova] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Updating instance_info_cache with network_info: [{"id": "396bcf29-b670-48a2-8e98-fb771af5eb13", "address": "fa:16:3e:1b:40:93", "network": {"id": "4e1058e7-f813-46ae-9371-9bc53e8c24cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1123881374-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.143", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c7e14d2fe94e58bb3df92a8841486b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f408ce42-3cac-4d9d-9c05-15471d653a18", "external-id": "nsx-vlan-transportzone-265", "segmentation_id": 265, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap396bcf29-b6", "ovs_interfaceid": "396bcf29-b670-48a2-8e98-fb771af5eb13", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.426675] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c8f78fb-26ef-49b9-8de5-ca67459fec53 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.435797] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6cc867c4-3cc9-4dd2-a303-bafe50d294b2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.472080] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eef3e55a-a750-403b-bd21-998429efe794 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.481046] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3488c3d-787d-468a-9845-51b1806a3603 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.498677] env[68217]: DEBUG nova.compute.provider_tree [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 715.535798] env[68217]: DEBUG 
oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960957, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065868} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 715.536079] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 715.536961] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0372779c-9529-41c3-9b10-02b4138c8830 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.561782] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Reconfiguring VM instance instance-00000025 to attach disk [datastore1] 9d2b3670-ef8a-477a-b876-7a8fe37fa065/9d2b3670-ef8a-477a-b876-7a8fe37fa065.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 715.562121] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-dd720acb-8481-45ab-9f92-e45b97261d76 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.584955] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 715.584955] env[68217]: value = "task-2960958" [ 715.584955] env[68217]: _type = "Task" [ 715.584955] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 715.593146] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960958, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 715.833616] env[68217]: DEBUG oslo_concurrency.lockutils [req-7121f520-9d4b-4cbc-a605-c3a4c2a28d2a req-2de958e8-0394-4cff-9688-3b79e57f742e service nova] Releasing lock "refresh_cache-da1524a7-2756-4429-ada2-b1f493544bd2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 715.893629] env[68217]: DEBUG nova.network.neutron [-] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.028170] env[68217]: ERROR nova.scheduler.client.report [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [req-81e64637-d719-4cc9-9964-495d54eaf697] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-81e64637-d719-4cc9-9964-495d54eaf697"}]} [ 716.050040] env[68217]: DEBUG nova.scheduler.client.report [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 716.064614] env[68217]: DEBUG nova.scheduler.client.report [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 716.064906] env[68217]: DEBUG nova.compute.provider_tree [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 716.077977] env[68217]: DEBUG nova.scheduler.client.report [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 716.095096] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960958, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.098752] env[68217]: DEBUG nova.scheduler.client.report [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 716.394325] env[68217]: INFO nova.compute.manager [-] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Took 1.66 seconds to deallocate network for instance. [ 716.588170] env[68217]: DEBUG nova.compute.manager [req-bdefb9ea-9ba3-4b1a-a2bf-50e4155f390a req-c42ab9e7-2359-46b8-a996-8fe5bc96571c service nova] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Received event network-vif-deleted-8fbb4d07-34f6-4f5c-8057-fd5de704aba2 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 716.601321] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960958, 'name': ReconfigVM_Task, 'duration_secs': 0.665503} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 716.601642] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Reconfigured VM instance instance-00000025 to attach disk [datastore1] 9d2b3670-ef8a-477a-b876-7a8fe37fa065/9d2b3670-ef8a-477a-b876-7a8fe37fa065.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 716.602437] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dedf7928-faca-4573-ae86-1040c52ac10f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.614258] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 716.614258] env[68217]: value = "task-2960959" [ 716.614258] env[68217]: _type = "Task" [ 716.614258] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 716.625478] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960959, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 716.658867] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a1d7b2c-6cca-4dda-b7bf-fa11757cd6bc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.670161] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5ea63e-6b63-4a6d-9471-77b6d203c9cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.708971] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9523e21-7543-4a05-ad02-403eedf663ca {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.717343] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b5a682-3ea3-4c56-bde0-5d799f44eaaa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.732625] env[68217]: DEBUG nova.compute.provider_tree [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 716.907847] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.124498] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960959, 'name': Rename_Task, 'duration_secs': 0.139606} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.124757] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 717.124991] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-92e52720-dcb0-4dab-8ca0-07509f1ba71a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.130741] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 717.130741] env[68217]: value = "task-2960960" [ 717.130741] env[68217]: _type = "Task" [ 717.130741] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 717.138103] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960960, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 717.271482] env[68217]: DEBUG nova.scheduler.client.report [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 61 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 717.271778] env[68217]: DEBUG nova.compute.provider_tree [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 61 to 62 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 717.271972] env[68217]: DEBUG nova.compute.provider_tree [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 717.640899] env[68217]: DEBUG oslo_vmware.api [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 
tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960960, 'name': PowerOnVM_Task, 'duration_secs': 0.436626} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 717.641189] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 717.641419] env[68217]: INFO nova.compute.manager [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Took 8.22 seconds to spawn the instance on the hypervisor. [ 717.641596] env[68217]: DEBUG nova.compute.manager [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 717.642385] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8cf856e-a605-4518-9f05-dd24baa250e8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.778587] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.859s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 717.779135] env[68217]: DEBUG nova.compute.manager [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 717.781638] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 39.421s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 717.781854] env[68217]: DEBUG nova.objects.instance [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lazy-loading 'resources' on Instance uuid 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 717.941789] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Acquiring lock "e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 717.941789] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Lock "e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 718.162702] env[68217]: INFO nova.compute.manager [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Took 46.77 seconds to build instance. [ 718.285425] env[68217]: DEBUG nova.compute.utils [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 718.290455] env[68217]: DEBUG nova.compute.manager [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 718.290455] env[68217]: DEBUG nova.network.neutron [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 718.326676] env[68217]: DEBUG nova.policy [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '752b6d9ab4d64b1390ca8388fb28db15', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad331ad8f44348f6b4c0a6c56977022d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 718.647288] env[68217]: DEBUG nova.network.neutron [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Successfully created port: c390985b-4383-44b2-881a-57ed086930c0 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 718.664174] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f89c7e86-2851-4213-aab4-d70666d45dd0 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "9d2b3670-ef8a-477a-b876-7a8fe37fa065" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 93.657s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 718.711015] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9efc40-423d-4cd6-b883-2ba954c8cfa4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.719519] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58d49fe6-f717-4d43-b6f8-720f05cedd5b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.752122] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd647d3-d41c-4d2d-ae83-b2e4e27056ee {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.759400] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb25b1d-c707-4ceb-be54-d686ff949e93 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.772600] env[68217]: DEBUG nova.compute.provider_tree [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.793020] env[68217]: 
DEBUG nova.compute.manager [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 719.168561] env[68217]: DEBUG nova.compute.manager [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 719.276331] env[68217]: DEBUG nova.scheduler.client.report [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 719.365317] env[68217]: DEBUG oslo_concurrency.lockutils [None req-64832110-ef7c-407e-b655-5f33943919fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "9d2b3670-ef8a-477a-b876-7a8fe37fa065" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.365542] env[68217]: DEBUG oslo_concurrency.lockutils [None req-64832110-ef7c-407e-b655-5f33943919fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "9d2b3670-ef8a-477a-b876-7a8fe37fa065" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.365724] env[68217]: DEBUG nova.compute.manager [None req-64832110-ef7c-407e-b655-5f33943919fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 719.366633] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5ef9ebb-c522-4f72-bcb9-89f419b4a787 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.374033] env[68217]: DEBUG nova.compute.manager [None req-64832110-ef7c-407e-b655-5f33943919fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68217) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 719.374571] env[68217]: DEBUG nova.objects.instance [None req-64832110-ef7c-407e-b655-5f33943919fc tempest-ImagesTestJSON-1122737556 
tempest-ImagesTestJSON-1122737556-project-member] Lazy-loading 'flavor' on Instance uuid 9d2b3670-ef8a-477a-b876-7a8fe37fa065 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 719.689761] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 719.781047] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.999s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 719.783326] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 41.188s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 719.783550] env[68217]: DEBUG nova.objects.instance [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lazy-loading 'resources' on Instance uuid 9ac81867-311c-42f3-b38f-67dc10f409c0 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 719.799105] env[68217]: INFO nova.scheduler.client.report [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Deleted allocations for instance 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9 [ 719.802961] env[68217]: DEBUG nova.compute.manager [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 719.827790] env[68217]: DEBUG nova.virt.hardware [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 719.827934] env[68217]: DEBUG nova.virt.hardware [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 719.828021] env[68217]: DEBUG nova.virt.hardware [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 719.829023] env[68217]: DEBUG nova.virt.hardware [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 719.829023] env[68217]: DEBUG nova.virt.hardware [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 719.829023] env[68217]: DEBUG nova.virt.hardware [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 719.829023] env[68217]: DEBUG nova.virt.hardware [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 719.829023] env[68217]: DEBUG nova.virt.hardware [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 719.829386] env[68217]: DEBUG 
nova.virt.hardware [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 719.829386] env[68217]: DEBUG nova.virt.hardware [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 719.829449] env[68217]: DEBUG nova.virt.hardware [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 719.830356] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b66a57f-0006-4e9b-93a5-158f6828325e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.838175] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09ec85c4-4d54-4b33-948e-bdee62546d38 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.311448] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40c88070-72b2-4190-861b-c428933c523d tempest-ListServersNegativeTestJSON-121190661 tempest-ListServersNegativeTestJSON-121190661-project-member] Lock "480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 45.905s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.387085] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-64832110-ef7c-407e-b655-5f33943919fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 720.387085] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7c0c0ce7-ad90-4aba-955a-cbd22208e3fa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.394237] env[68217]: DEBUG oslo_vmware.api [None req-64832110-ef7c-407e-b655-5f33943919fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 720.394237] env[68217]: value = "task-2960961" [ 720.394237] env[68217]: _type = "Task" [ 720.394237] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 720.409832] env[68217]: DEBUG oslo_vmware.api [None req-64832110-ef7c-407e-b655-5f33943919fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960961, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 720.661293] env[68217]: DEBUG nova.compute.manager [req-8eeaec84-5d37-43b3-88ab-26a6e6b16736 req-052de804-8e14-401f-9495-384a6eb9f24e service nova] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Received event network-vif-plugged-c390985b-4383-44b2-881a-57ed086930c0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 720.661293] env[68217]: DEBUG oslo_concurrency.lockutils [req-8eeaec84-5d37-43b3-88ab-26a6e6b16736 req-052de804-8e14-401f-9495-384a6eb9f24e service nova] Acquiring lock "38787c7d-a9cf-4ce6-a112-c1ec259697ca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 720.661293] env[68217]: DEBUG oslo_concurrency.lockutils [req-8eeaec84-5d37-43b3-88ab-26a6e6b16736 req-052de804-8e14-401f-9495-384a6eb9f24e service nova] Lock "38787c7d-a9cf-4ce6-a112-c1ec259697ca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 720.661293] env[68217]: DEBUG oslo_concurrency.lockutils [req-8eeaec84-5d37-43b3-88ab-26a6e6b16736 req-052de804-8e14-401f-9495-384a6eb9f24e service nova] Lock "38787c7d-a9cf-4ce6-a112-c1ec259697ca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 720.661293] env[68217]: DEBUG nova.compute.manager [req-8eeaec84-5d37-43b3-88ab-26a6e6b16736 req-052de804-8e14-401f-9495-384a6eb9f24e service nova] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] No waiting events found dispatching network-vif-plugged-c390985b-4383-44b2-881a-57ed086930c0 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 720.661468] env[68217]: WARNING nova.compute.manager [req-8eeaec84-5d37-43b3-88ab-26a6e6b16736 req-052de804-8e14-401f-9495-384a6eb9f24e service nova] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Received unexpected event network-vif-plugged-c390985b-4383-44b2-881a-57ed086930c0 for instance with vm_state building and task_state spawning. 
[ 720.727256] env[68217]: DEBUG nova.network.neutron [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Successfully updated port: c390985b-4383-44b2-881a-57ed086930c0 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 720.764132] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4009c65-7c72-4e90-9ce5-677ba5185ed1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.773331] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abc51b8b-68bd-45e3-8994-8916f7dbb61d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.808133] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f92908-056e-4db3-9a90-4bee7614d0e3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.816492] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a49a5bdc-6a71-4ded-83fb-b7330bbecddb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.831441] env[68217]: DEBUG nova.compute.provider_tree [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 720.906777] env[68217]: DEBUG oslo_vmware.api [None req-64832110-ef7c-407e-b655-5f33943919fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960961, 'name': PowerOffVM_Task, 'duration_secs': 0.188069} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 720.910485] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-64832110-ef7c-407e-b655-5f33943919fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 720.911491] env[68217]: DEBUG nova.compute.manager [None req-64832110-ef7c-407e-b655-5f33943919fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 720.911574] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-956961d5-f83d-4eaf-91e7-23dd9fec8f20 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.229602] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "refresh_cache-38787c7d-a9cf-4ce6-a112-c1ec259697ca" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.229771] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "refresh_cache-38787c7d-a9cf-4ce6-a112-c1ec259697ca" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 721.230029] env[68217]: DEBUG nova.network.neutron [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 721.335415] env[68217]: DEBUG nova.scheduler.client.report [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 721.422954] env[68217]: DEBUG oslo_concurrency.lockutils [None req-64832110-ef7c-407e-b655-5f33943919fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "9d2b3670-ef8a-477a-b876-7a8fe37fa065" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.057s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.475790] env[68217]: DEBUG oslo_concurrency.lockutils [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 
tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Acquiring lock "af11d05f-4432-4505-bb52-226414488960" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 721.476025] env[68217]: DEBUG oslo_concurrency.lockutils [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Lock "af11d05f-4432-4505-bb52-226414488960" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.762224] env[68217]: DEBUG nova.network.neutron [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 721.843089] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.058s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 721.846617] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 41.192s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 721.848129] env[68217]: INFO nova.compute.claims [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 721.879025] env[68217]: INFO nova.scheduler.client.report [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Deleted allocations for instance 9ac81867-311c-42f3-b38f-67dc10f409c0 [ 721.919032] env[68217]: DEBUG nova.network.neutron [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Updating instance_info_cache with network_info: [{"id": "c390985b-4383-44b2-881a-57ed086930c0", "address": "fa:16:3e:96:d3:15", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc390985b-43", "ovs_interfaceid": "c390985b-4383-44b2-881a-57ed086930c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 721.980837] env[68217]: DEBUG nova.compute.utils [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 722.389773] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7fbaa3bb-8a45-4d14-847e-dae4f5836461 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "9ac81867-311c-42f3-b38f-67dc10f409c0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 47.303s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.422409] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "refresh_cache-38787c7d-a9cf-4ce6-a112-c1ec259697ca" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 722.422816] env[68217]: DEBUG nova.compute.manager [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Instance network_info: |[{"id": "c390985b-4383-44b2-881a-57ed086930c0", "address": "fa:16:3e:96:d3:15", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc390985b-43", "ovs_interfaceid": "c390985b-4383-44b2-881a-57ed086930c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 722.423268] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 
tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:d3:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b1f3e6c3-5584-4852-9017-476ab8ac4946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c390985b-4383-44b2-881a-57ed086930c0', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 722.430817] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Creating folder: Project (ad331ad8f44348f6b4c0a6c56977022d). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 722.431185] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5ee108ad-c76c-439e-9cb2-419509feafde {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.444564] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Created folder: Project (ad331ad8f44348f6b4c0a6c56977022d) in parent group-v594094. [ 722.444842] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Creating folder: Instances. Parent ref: group-v594204. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 722.445024] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-be9a7023-dfc7-4a59-ab06-7249ac94be2a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.454769] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Created folder: Instances in parent group-v594204. [ 722.455017] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 722.455220] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 722.455438] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-69e97cdf-3c01-4b81-a5bf-3338b9ffa767 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.474688] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 722.474688] env[68217]: value = "task-2960964" [ 722.474688] env[68217]: _type = "Task" [ 722.474688] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 722.485535] env[68217]: DEBUG oslo_concurrency.lockutils [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Lock "af11d05f-4432-4505-bb52-226414488960" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 722.486036] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960964, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 722.984016] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960964, 'name': CreateVM_Task, 'duration_secs': 0.327618} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 722.986554] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 722.987712] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.987879] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 722.990934] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 722.991208] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fb235f5-68d1-47aa-af20-a89841f50d19 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.995998] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 722.995998] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]525804d3-a778-4686-c19a-0ea52fa591e9" [ 722.995998] env[68217]: _type = "Task" [ 722.995998] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.006013] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]525804d3-a778-4686-c19a-0ea52fa591e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.198010] env[68217]: DEBUG nova.compute.manager [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 723.199060] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a687881-1a1e-4de2-ba15-05dfdde39fe7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.297658] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d728e06-d796-43ac-b0f2-9b5d4b84d758 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.306019] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fd69e61-99ed-4992-819f-59e5539f864b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.335696] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb3fd40-d86a-4d66-a947-d2a98b0b230e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.343680] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42cc236b-21d5-436c-beec-6b43cff4b6f8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.356895] env[68217]: DEBUG nova.compute.provider_tree [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 723.373865] env[68217]: DEBUG nova.compute.manager [req-3f754eff-3dab-4062-89ae-e5b46c738ce2 req-82b58cbb-e885-475c-b532-01418c2273b6 service nova] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Received event network-changed-c390985b-4383-44b2-881a-57ed086930c0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 723.374052] env[68217]: DEBUG nova.compute.manager [req-3f754eff-3dab-4062-89ae-e5b46c738ce2 req-82b58cbb-e885-475c-b532-01418c2273b6 service nova] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Refreshing instance network info cache due to event network-changed-c390985b-4383-44b2-881a-57ed086930c0. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 723.374256] env[68217]: DEBUG oslo_concurrency.lockutils [req-3f754eff-3dab-4062-89ae-e5b46c738ce2 req-82b58cbb-e885-475c-b532-01418c2273b6 service nova] Acquiring lock "refresh_cache-38787c7d-a9cf-4ce6-a112-c1ec259697ca" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.374394] env[68217]: DEBUG oslo_concurrency.lockutils [req-3f754eff-3dab-4062-89ae-e5b46c738ce2 req-82b58cbb-e885-475c-b532-01418c2273b6 service nova] Acquired lock "refresh_cache-38787c7d-a9cf-4ce6-a112-c1ec259697ca" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 723.374543] env[68217]: DEBUG nova.network.neutron [req-3f754eff-3dab-4062-89ae-e5b46c738ce2 req-82b58cbb-e885-475c-b532-01418c2273b6 service nova] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Refreshing network info cache for port c390985b-4383-44b2-881a-57ed086930c0 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 723.506400] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]525804d3-a778-4686-c19a-0ea52fa591e9, 'name': SearchDatastore_Task, 'duration_secs': 0.0107} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 723.506704] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 723.506925] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 723.507134] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.507272] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 723.507444] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Creating directory with path [datastore1] devstack-image-cache_base 
{{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 723.507770] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e42cd6b9-a575-469b-9d1e-281353fa711f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.516532] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 723.516704] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 723.517431] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84420f8f-04da-4041-9b94-1b9b0b039198 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.523028] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 723.523028] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5275ebb4-3e10-13e7-062a-8252d3cf1b80" [ 723.523028] env[68217]: _type = "Task" [ 723.523028] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.531283] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5275ebb4-3e10-13e7-062a-8252d3cf1b80, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.569603] env[68217]: DEBUG oslo_concurrency.lockutils [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Acquiring lock "af11d05f-4432-4505-bb52-226414488960" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.569603] env[68217]: DEBUG oslo_concurrency.lockutils [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Lock "af11d05f-4432-4505-bb52-226414488960" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.569603] env[68217]: INFO nova.compute.manager [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] [instance: af11d05f-4432-4505-bb52-226414488960] Attaching volume 7a0a3945-c469-4d2a-a6b4-db90a9398d63 to /dev/sdb [ 723.608087] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ccf5797-4dc9-45de-ad95-6f2cc4cae880 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.613997] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8f1eef-e738-4302-af24-4fe939293618 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.629772] env[68217]: DEBUG nova.virt.block_device [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] [instance: af11d05f-4432-4505-bb52-226414488960] Updating existing volume attachment record: 24d515f5-c3eb-4016-9c6f-07e72e4ac7cd {{(pid=68217) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 723.719399] env[68217]: INFO nova.compute.manager [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] instance snapshotting [ 723.719836] env[68217]: WARNING nova.compute.manager [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] trying to snapshot a non-running instance: (state: 4 expected: 1) [ 723.723238] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5d6789d-af23-4bf9-96df-c5ec76e62dec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.727490] env[68217]: DEBUG oslo_concurrency.lockutils [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "11f9c054-62b9-4ac9-9651-5c85e7a86663" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.727713] env[68217]: DEBUG oslo_concurrency.lockutils [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "11f9c054-62b9-4ac9-9651-5c85e7a86663" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.727896] env[68217]: DEBUG oslo_concurrency.lockutils [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "11f9c054-62b9-4ac9-9651-5c85e7a86663-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 723.728081] env[68217]: DEBUG oslo_concurrency.lockutils [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "11f9c054-62b9-4ac9-9651-5c85e7a86663-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 723.728269] env[68217]: DEBUG oslo_concurrency.lockutils [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "11f9c054-62b9-4ac9-9651-5c85e7a86663-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 723.730255] env[68217]: INFO nova.compute.manager [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Terminating instance [ 723.749098] env[68217]: DEBUG nova.compute.manager [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 723.749098] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 723.749098] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8331c913-fb01-4258-834f-0ed50439814a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.753331] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2596beef-97e4-47f9-aee0-2029b2834913 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.763265] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 723.763761] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a819c785-7853-480d-a13d-a818fd9da781 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.769847] env[68217]: DEBUG oslo_vmware.api [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 723.769847] env[68217]: value = "task-2960965" [ 723.769847] env[68217]: _type = "Task" [ 723.769847] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 723.778744] env[68217]: DEBUG oslo_vmware.api [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960965, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 723.862827] env[68217]: DEBUG nova.scheduler.client.report [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 724.034484] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5275ebb4-3e10-13e7-062a-8252d3cf1b80, 'name': SearchDatastore_Task, 'duration_secs': 0.009599} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.035242] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b328453d-8445-4a3c-8156-8cb6cd30d450 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.043141] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 724.043141] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52843c5b-6a2a-aea0-42b5-5521acc37f55" [ 724.043141] env[68217]: _type = "Task" [ 724.043141] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.050862] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52843c5b-6a2a-aea0-42b5-5521acc37f55, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.265689] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 724.266050] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f4980413-a366-4f00-bb4a-774993b0a8e0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.277141] env[68217]: DEBUG oslo_vmware.api [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 724.277141] env[68217]: value = "task-2960969" [ 724.277141] env[68217]: _type = "Task" [ 724.277141] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.284463] env[68217]: DEBUG oslo_vmware.api [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960965, 'name': PowerOffVM_Task, 'duration_secs': 0.345578} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.285859] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 724.285859] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 724.285859] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3afd32cd-8596-48d2-8277-1bb9d8bb6bac {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.291103] env[68217]: DEBUG oslo_vmware.api [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960969, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.307174] env[68217]: DEBUG nova.network.neutron [req-3f754eff-3dab-4062-89ae-e5b46c738ce2 req-82b58cbb-e885-475c-b532-01418c2273b6 service nova] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Updated VIF entry in instance network info cache for port c390985b-4383-44b2-881a-57ed086930c0. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 724.308863] env[68217]: DEBUG nova.network.neutron [req-3f754eff-3dab-4062-89ae-e5b46c738ce2 req-82b58cbb-e885-475c-b532-01418c2273b6 service nova] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Updating instance_info_cache with network_info: [{"id": "c390985b-4383-44b2-881a-57ed086930c0", "address": "fa:16:3e:96:d3:15", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc390985b-43", "ovs_interfaceid": "c390985b-4383-44b2-881a-57ed086930c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.354813] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 724.354813] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 724.354813] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Deleting the datastore file [datastore1] 11f9c054-62b9-4ac9-9651-5c85e7a86663 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 724.354813] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ca0fc061-58cd-40ad-88f9-35edf53ecebf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.363013] env[68217]: DEBUG oslo_vmware.api [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for the task: (returnval){ [ 724.363013] env[68217]: value = "task-2960971" [ 724.363013] env[68217]: _type = "Task" [ 724.363013] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.366858] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.520s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 724.367514] env[68217]: DEBUG nova.compute.manager [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 724.371189] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.810s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 724.371578] env[68217]: DEBUG nova.objects.instance [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Lazy-loading 'resources' on Instance uuid 366c780a-2870-4e6e-8cfe-7eec10c363d5 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 724.379701] env[68217]: DEBUG oslo_vmware.api [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960971, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.554677] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52843c5b-6a2a-aea0-42b5-5521acc37f55, 'name': SearchDatastore_Task, 'duration_secs': 0.010076} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.555930] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 724.555930] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 38787c7d-a9cf-4ce6-a112-c1ec259697ca/38787c7d-a9cf-4ce6-a112-c1ec259697ca.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 724.555930] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-697a56c9-4f4a-479b-8943-ce7a18c933ee {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.561845] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 724.561845] env[68217]: value = "task-2960972" [ 724.561845] env[68217]: _type = "Task" [ 724.561845] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 724.570079] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960972, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 724.791502] env[68217]: DEBUG oslo_vmware.api [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960969, 'name': CreateSnapshot_Task, 'duration_secs': 0.469931} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.791844] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 724.792682] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0aae2b98-c675-47a1-8f19-e52b345297bf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.809954] env[68217]: DEBUG oslo_concurrency.lockutils [req-3f754eff-3dab-4062-89ae-e5b46c738ce2 req-82b58cbb-e885-475c-b532-01418c2273b6 service nova] Releasing lock "refresh_cache-38787c7d-a9cf-4ce6-a112-c1ec259697ca" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 724.881753] env[68217]: DEBUG nova.compute.utils [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 724.883604] env[68217]: DEBUG oslo_vmware.api [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Task: {'id': task-2960971, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.186337} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 724.885046] env[68217]: DEBUG nova.compute.manager [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 724.885046] env[68217]: DEBUG nova.network.neutron [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 724.886718] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 724.886904] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 724.887089] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 724.887263] env[68217]: INFO nova.compute.manager [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Took 1.14 seconds to destroy the instance on the hypervisor. [ 724.887595] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 724.888497] env[68217]: DEBUG nova.compute.manager [-] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 724.888497] env[68217]: DEBUG nova.network.neutron [-] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 725.072573] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960972, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.218285] env[68217]: DEBUG nova.policy [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8fcfd9e5288b4ee2b012a0a2cf242d7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3bf05c5ad8574e0f858cd2261af9ef24', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 725.320584] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 725.321897] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-997dd2b0-b52d-4e85-8695-41a5c244d753 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.331143] env[68217]: DEBUG oslo_vmware.api [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 725.331143] env[68217]: value = "task-2960973" [ 725.331143] env[68217]: _type = "Task" [ 725.331143] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.342550] env[68217]: DEBUG oslo_vmware.api [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960973, 'name': CloneVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.389927] env[68217]: DEBUG nova.compute.manager [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 725.409286] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1921cbef-aa33-4515-86a3-1ecd72f81d22 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.418410] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14b58b52-fcb5-4120-a501-afc0e9692591 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.458897] env[68217]: DEBUG nova.compute.manager [req-583e329e-2cf2-4e65-bf0f-9f3e959ac3b0 req-6b1bd3bf-c57c-4054-89b1-4234d66a995e service nova] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Received event network-vif-deleted-d2eec7fc-f623-4a8e-aee2-762e1eb58cf7 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 725.460054] env[68217]: INFO nova.compute.manager [req-583e329e-2cf2-4e65-bf0f-9f3e959ac3b0 req-6b1bd3bf-c57c-4054-89b1-4234d66a995e service nova] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Neutron deleted interface d2eec7fc-f623-4a8e-aee2-762e1eb58cf7; detaching it from the instance and deleting it from the info cache [ 725.460054] env[68217]: DEBUG nova.network.neutron [req-583e329e-2cf2-4e65-bf0f-9f3e959ac3b0 req-6b1bd3bf-c57c-4054-89b1-4234d66a995e service nova] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.462359] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4796885-b562-4329-af95-404ce30c735a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.472693] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f516126e-c960-419f-9732-55f68bd6e86c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.488270] env[68217]: DEBUG nova.compute.provider_tree [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 725.573513] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960972, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514428} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 725.573888] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 38787c7d-a9cf-4ce6-a112-c1ec259697ca/38787c7d-a9cf-4ce6-a112-c1ec259697ca.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 725.574238] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 725.574574] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6ea7256d-e592-47fd-bd42-4c5074c3a8a8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.581172] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 725.581172] env[68217]: value = "task-2960974" [ 725.581172] env[68217]: _type = "Task" [ 725.581172] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 725.588688] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960974, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.698741] env[68217]: DEBUG nova.network.neutron [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Successfully created port: b72a88d5-3b2e-461c-be4a-193dde782c4c {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 725.705832] env[68217]: DEBUG nova.network.neutron [-] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.841842] env[68217]: DEBUG oslo_vmware.api [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960973, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 725.962919] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-905576d0-b2e0-45ce-b2b2-eae72289f442 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.976406] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1fde49-72f4-4b1c-bf17-911080b5dfd5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 725.992133] env[68217]: DEBUG nova.scheduler.client.report [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 726.014303] env[68217]: DEBUG nova.compute.manager [req-583e329e-2cf2-4e65-bf0f-9f3e959ac3b0 req-6b1bd3bf-c57c-4054-89b1-4234d66a995e service nova] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Detach interface failed, port_id=d2eec7fc-f623-4a8e-aee2-762e1eb58cf7, reason: Instance 11f9c054-62b9-4ac9-9651-5c85e7a86663 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 726.091493] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960974, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074939} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.091774] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 726.092585] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe9bcc23-8f61-4f92-9cc9-aafbfcf0c73b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.114288] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 38787c7d-a9cf-4ce6-a112-c1ec259697ca/38787c7d-a9cf-4ce6-a112-c1ec259697ca.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 726.114583] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-de92e949-0186-47fd-9bb9-723e7be331a7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.137538] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 726.137538] env[68217]: value = "task-2960976" [ 726.137538] env[68217]: _type = "Task" [ 726.137538] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.146032] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960976, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.208498] env[68217]: INFO nova.compute.manager [-] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Took 1.32 seconds to deallocate network for instance. [ 726.342435] env[68217]: DEBUG oslo_vmware.api [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960973, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.401194] env[68217]: DEBUG nova.compute.manager [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 726.424203] env[68217]: DEBUG nova.virt.hardware [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 726.424438] env[68217]: DEBUG nova.virt.hardware [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 726.424580] env[68217]: DEBUG nova.virt.hardware [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 726.424752] env[68217]: DEBUG nova.virt.hardware [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 726.424929] env[68217]: DEBUG nova.virt.hardware [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 726.425041] env[68217]: DEBUG nova.virt.hardware [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 726.425253] env[68217]: DEBUG nova.virt.hardware [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 726.425414] env[68217]: DEBUG nova.virt.hardware [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 726.425576] env[68217]: DEBUG nova.virt.hardware [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 726.425732] env[68217]: DEBUG nova.virt.hardware [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 726.427014] env[68217]: DEBUG nova.virt.hardware [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 726.427014] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16697570-4159-4cef-9231-59fe436543d2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.435479] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4616be42-6bde-4e4d-9fcf-c5f21dd071e8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.500104] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.129s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 726.502569] env[68217]: DEBUG oslo_concurrency.lockutils [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 42.949s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 726.502834] env[68217]: DEBUG nova.objects.instance [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Lazy-loading 'resources' on Instance uuid cf457d43-b939-4284-b84d-9075895e9dda {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 726.524882] env[68217]: INFO nova.scheduler.client.report [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Deleted allocations for instance 366c780a-2870-4e6e-8cfe-7eec10c363d5 [ 726.645193] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960976, 'name': ReconfigVM_Task, 'duration_secs': 0.45322} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 726.645473] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 38787c7d-a9cf-4ce6-a112-c1ec259697ca/38787c7d-a9cf-4ce6-a112-c1ec259697ca.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 726.646312] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1e356ba7-602e-4108-9603-313f8007af84 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 726.652905] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 726.652905] env[68217]: value = "task-2960977" [ 726.652905] env[68217]: _type = "Task" [ 726.652905] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 726.662471] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960977, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.719731] env[68217]: DEBUG oslo_concurrency.lockutils [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.845645] env[68217]: DEBUG oslo_vmware.api [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960973, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 726.957838] env[68217]: DEBUG oslo_concurrency.lockutils [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "e550084b-84dd-4ae8-8667-2edb45b49e2b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 726.958287] env[68217]: DEBUG oslo_concurrency.lockutils [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "e550084b-84dd-4ae8-8667-2edb45b49e2b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.034611] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ea399312-0858-4ab4-bda2-aa749c02335a tempest-ServerAddressesTestJSON-1034861226 tempest-ServerAddressesTestJSON-1034861226-project-member] Lock "366c780a-2870-4e6e-8cfe-7eec10c363d5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 49.470s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.162537] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960977, 'name': Rename_Task, 'duration_secs': 0.176374} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.165327] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 727.165794] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61e9cead-1294-4287-8e84-eaf3da66290e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.172296] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 727.172296] env[68217]: value = "task-2960978" [ 727.172296] env[68217]: _type = "Task" [ 727.172296] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.185765] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960978, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.324837] env[68217]: DEBUG nova.compute.manager [req-a543ec2f-ae73-4bbf-8e75-ad9a0ed0df06 req-a2e07fc3-6d81-4848-a1b9-a0f9b3f6b663 service nova] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Received event network-vif-plugged-b72a88d5-3b2e-461c-be4a-193dde782c4c {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 727.325078] env[68217]: DEBUG oslo_concurrency.lockutils [req-a543ec2f-ae73-4bbf-8e75-ad9a0ed0df06 req-a2e07fc3-6d81-4848-a1b9-a0f9b3f6b663 service nova] Acquiring lock "3d03e0b7-0469-4041-a7d5-7768326eb3b5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 727.325317] env[68217]: DEBUG oslo_concurrency.lockutils [req-a543ec2f-ae73-4bbf-8e75-ad9a0ed0df06 req-a2e07fc3-6d81-4848-a1b9-a0f9b3f6b663 service nova] Lock "3d03e0b7-0469-4041-a7d5-7768326eb3b5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 727.325493] env[68217]: DEBUG oslo_concurrency.lockutils [req-a543ec2f-ae73-4bbf-8e75-ad9a0ed0df06 req-a2e07fc3-6d81-4848-a1b9-a0f9b3f6b663 service nova] Lock "3d03e0b7-0469-4041-a7d5-7768326eb3b5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 727.325660] env[68217]: DEBUG nova.compute.manager [req-a543ec2f-ae73-4bbf-8e75-ad9a0ed0df06 req-a2e07fc3-6d81-4848-a1b9-a0f9b3f6b663 service nova] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] No waiting events found dispatching network-vif-plugged-b72a88d5-3b2e-461c-be4a-193dde782c4c {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 727.325821] env[68217]: WARNING nova.compute.manager [req-a543ec2f-ae73-4bbf-8e75-ad9a0ed0df06 req-a2e07fc3-6d81-4848-a1b9-a0f9b3f6b663 service nova] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Received unexpected event network-vif-plugged-b72a88d5-3b2e-461c-be4a-193dde782c4c for instance with vm_state building and task_state spawning. [ 727.343803] env[68217]: DEBUG oslo_vmware.api [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960973, 'name': CloneVM_Task, 'duration_secs': 1.867504} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 727.348451] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Created linked-clone VM from snapshot [ 727.350229] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e6304e-5e90-41ac-a949-bb04efa98aef {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.358834] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Uploading image 2f498595-328c-49f2-8657-1cc0a8595983 {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 727.388574] env[68217]: DEBUG oslo_vmware.rw_handles [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 727.388574] env[68217]: value = "vm-594211" [ 727.388574] env[68217]: _type = "VirtualMachine" [ 727.388574] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 727.388856] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-fd78716d-676a-4095-8fba-73a35282c7f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.396604] env[68217]: DEBUG oslo_vmware.rw_handles [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lease: (returnval){ [ 727.396604] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]523faa8f-3cfe-e8c1-09d0-9476a5b8e13f" [ 727.396604] env[68217]: _type = "HttpNfcLease" [ 727.396604] env[68217]: } obtained for exporting VM: (result){ [ 727.396604] env[68217]: value = "vm-594211" [ 727.396604] env[68217]: _type = "VirtualMachine" [ 727.396604] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 727.396604] env[68217]: DEBUG oslo_vmware.api [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the lease: (returnval){ [ 727.396604] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]523faa8f-3cfe-e8c1-09d0-9476a5b8e13f" [ 727.396604] env[68217]: _type = "HttpNfcLease" [ 727.396604] env[68217]: } to be ready. {{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 727.405625] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 727.405625] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]523faa8f-3cfe-e8c1-09d0-9476a5b8e13f" [ 727.405625] env[68217]: _type = "HttpNfcLease" [ 727.405625] env[68217]: } is initializing. 
{{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 727.488590] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0fd994ba-e99e-413b-9524-34e2aebfb091 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.497115] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27dc0134-ea34-4587-a2c5-298edf773df9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.538866] env[68217]: DEBUG nova.network.neutron [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Successfully updated port: b72a88d5-3b2e-461c-be4a-193dde782c4c {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 727.540783] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a56cb92-c9c8-4eb7-ac2a-dad86d0a67de {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.549722] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4074a82a-91f1-492e-b3e7-0b4c3de0bf15 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.568599] env[68217]: DEBUG nova.compute.provider_tree [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 727.683793] env[68217]: DEBUG oslo_vmware.api [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960978, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 727.906749] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 727.906749] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]523faa8f-3cfe-e8c1-09d0-9476a5b8e13f" [ 727.906749] env[68217]: _type = "HttpNfcLease" [ 727.906749] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 727.907050] env[68217]: DEBUG oslo_vmware.rw_handles [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 727.907050] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]523faa8f-3cfe-e8c1-09d0-9476a5b8e13f" [ 727.907050] env[68217]: _type = "HttpNfcLease" [ 727.907050] env[68217]: }. 
{{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 727.907771] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b5a0be6-aef9-4a0f-adf3-03eea32b7760 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.915624] env[68217]: DEBUG oslo_vmware.rw_handles [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527c3f1e-6f98-18b6-4e76-745fe1935ca0/disk-0.vmdk from lease info. {{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 727.915745] env[68217]: DEBUG oslo_vmware.rw_handles [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527c3f1e-6f98-18b6-4e76-745fe1935ca0/disk-0.vmdk for reading. {{(pid=68217) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 728.015818] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-7f378a6a-80fb-46ba-b60c-ec637154de75 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.045316] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "refresh_cache-3d03e0b7-0469-4041-a7d5-7768326eb3b5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.045475] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquired lock "refresh_cache-3d03e0b7-0469-4041-a7d5-7768326eb3b5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 728.045540] env[68217]: DEBUG nova.network.neutron [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 728.071531] env[68217]: DEBUG nova.scheduler.client.report [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 728.184578] env[68217]: DEBUG oslo_vmware.api [None 
req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960978, 'name': PowerOnVM_Task, 'duration_secs': 0.908984} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.184962] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 728.185057] env[68217]: INFO nova.compute.manager [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Took 8.38 seconds to spawn the instance on the hypervisor. [ 728.185231] env[68217]: DEBUG nova.compute.manager [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 728.186055] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba488c9-10b4-4b8e-915b-f12ff8e8a79a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.197338] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] [instance: af11d05f-4432-4505-bb52-226414488960] Volume attach. 
Driver type: vmdk {{(pid=68217) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 728.197338] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] [instance: af11d05f-4432-4505-bb52-226414488960] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594209', 'volume_id': '7a0a3945-c469-4d2a-a6b4-db90a9398d63', 'name': 'volume-7a0a3945-c469-4d2a-a6b4-db90a9398d63', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'af11d05f-4432-4505-bb52-226414488960', 'attached_at': '', 'detached_at': '', 'volume_id': '7a0a3945-c469-4d2a-a6b4-db90a9398d63', 'serial': '7a0a3945-c469-4d2a-a6b4-db90a9398d63'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 728.197783] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b70f7aa-f076-4228-ae00-896f2c20e468 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.228463] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15533dbd-13db-41e8-b9d8-4209bf79260a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.255691] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] [instance: af11d05f-4432-4505-bb52-226414488960] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] volume-7a0a3945-c469-4d2a-a6b4-db90a9398d63/volume-7a0a3945-c469-4d2a-a6b4-db90a9398d63.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 728.258977] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d0a76182-8631-44a7-87e3-eb6361823e87 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.278967] env[68217]: DEBUG oslo_vmware.api [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Waiting for the task: (returnval){ [ 728.278967] env[68217]: value = "task-2960980" [ 728.278967] env[68217]: _type = "Task" [ 728.278967] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.287777] env[68217]: DEBUG oslo_vmware.api [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Task: {'id': task-2960980, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.578149] env[68217]: DEBUG oslo_concurrency.lockutils [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.075s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 728.582340] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 42.689s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 728.610068] env[68217]: INFO nova.scheduler.client.report [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Deleted allocations for instance cf457d43-b939-4284-b84d-9075895e9dda [ 728.637761] env[68217]: DEBUG nova.network.neutron [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 728.721257] env[68217]: INFO nova.compute.manager [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Took 50.49 seconds to build instance. [ 728.791676] env[68217]: DEBUG oslo_vmware.api [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Task: {'id': task-2960980, 'name': ReconfigVM_Task, 'duration_secs': 0.49842} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.794876] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] [instance: af11d05f-4432-4505-bb52-226414488960] Reconfigured VM instance instance-0000000e to attach disk [datastore1] volume-7a0a3945-c469-4d2a-a6b4-db90a9398d63/volume-7a0a3945-c469-4d2a-a6b4-db90a9398d63.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 728.800265] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02823fdb-a97d-4830-bd6b-0c11c4f134b5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.818599] env[68217]: DEBUG oslo_vmware.api [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Waiting for the task: (returnval){ [ 728.818599] env[68217]: value = "task-2960981" [ 728.818599] env[68217]: _type = "Task" [ 728.818599] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.828060] env[68217]: DEBUG oslo_vmware.api [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Task: {'id': task-2960981, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.926430] env[68217]: DEBUG nova.network.neutron [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Updating instance_info_cache with network_info: [{"id": "b72a88d5-3b2e-461c-be4a-193dde782c4c", "address": "fa:16:3e:45:64:e6", "network": {"id": "d699b565-498b-4788-9c08-0e23871a9180", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-728170078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bf05c5ad8574e0f858cd2261af9ef24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d669e36a-5c9d-4fa4-92c8-90e7cb814262", "external-id": "nsx-vlan-transportzone-589", "segmentation_id": 589, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb72a88d5-3b", "ovs_interfaceid": "b72a88d5-3b2e-461c-be4a-193dde782c4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 729.118452] env[68217]: DEBUG oslo_concurrency.lockutils [None req-442f57ce-bed1-4b43-b091-cc51eb3ff1b7 tempest-DeleteServersAdminTestJSON-1006304365 tempest-DeleteServersAdminTestJSON-1006304365-project-member] Lock "cf457d43-b939-4284-b84d-9075895e9dda" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 48.951s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.225484] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3e934d56-89e7-461e-981f-0f9210b11bb7 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "38787c7d-a9cf-4ce6-a112-c1ec259697ca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.505s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 729.330308] env[68217]: DEBUG oslo_vmware.api [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Task: {'id': task-2960981, 'name': ReconfigVM_Task, 'duration_secs': 0.163027} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.330386] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] [instance: af11d05f-4432-4505-bb52-226414488960] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594209', 'volume_id': '7a0a3945-c469-4d2a-a6b4-db90a9398d63', 'name': 'volume-7a0a3945-c469-4d2a-a6b4-db90a9398d63', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'af11d05f-4432-4505-bb52-226414488960', 'attached_at': '', 'detached_at': '', 'volume_id': '7a0a3945-c469-4d2a-a6b4-db90a9398d63', 'serial': '7a0a3945-c469-4d2a-a6b4-db90a9398d63'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 729.362779] env[68217]: DEBUG nova.compute.manager [req-41b3161d-f479-45e9-9166-d23607e536d1 req-f8895f9a-5d70-4281-ba43-fc3301026bfd service nova] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Received event network-changed-b72a88d5-3b2e-461c-be4a-193dde782c4c {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 729.362978] env[68217]: DEBUG nova.compute.manager [req-41b3161d-f479-45e9-9166-d23607e536d1 req-f8895f9a-5d70-4281-ba43-fc3301026bfd service nova] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Refreshing instance network info cache due to event network-changed-b72a88d5-3b2e-461c-be4a-193dde782c4c. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 729.363218] env[68217]: DEBUG oslo_concurrency.lockutils [req-41b3161d-f479-45e9-9166-d23607e536d1 req-f8895f9a-5d70-4281-ba43-fc3301026bfd service nova] Acquiring lock "refresh_cache-3d03e0b7-0469-4041-a7d5-7768326eb3b5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 729.431463] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Releasing lock "refresh_cache-3d03e0b7-0469-4041-a7d5-7768326eb3b5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 729.431827] env[68217]: DEBUG nova.compute.manager [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Instance network_info: |[{"id": "b72a88d5-3b2e-461c-be4a-193dde782c4c", "address": "fa:16:3e:45:64:e6", "network": {"id": "d699b565-498b-4788-9c08-0e23871a9180", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-728170078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bf05c5ad8574e0f858cd2261af9ef24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"d669e36a-5c9d-4fa4-92c8-90e7cb814262", "external-id": "nsx-vlan-transportzone-589", "segmentation_id": 589, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb72a88d5-3b", "ovs_interfaceid": "b72a88d5-3b2e-461c-be4a-193dde782c4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 729.432137] env[68217]: DEBUG oslo_concurrency.lockutils [req-41b3161d-f479-45e9-9166-d23607e536d1 req-f8895f9a-5d70-4281-ba43-fc3301026bfd service nova] Acquired lock "refresh_cache-3d03e0b7-0469-4041-a7d5-7768326eb3b5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 729.432316] env[68217]: DEBUG nova.network.neutron [req-41b3161d-f479-45e9-9166-d23607e536d1 req-f8895f9a-5d70-4281-ba43-fc3301026bfd service nova] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Refreshing network info cache for port b72a88d5-3b2e-461c-be4a-193dde782c4c {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 729.433604] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:45:64:e6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd669e36a-5c9d-4fa4-92c8-90e7cb814262', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b72a88d5-3b2e-461c-be4a-193dde782c4c', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 729.444208] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Creating folder: Project (3bf05c5ad8574e0f858cd2261af9ef24). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 729.444374] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0b08643a-087a-445a-a989-88a2b055dd54 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.456031] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Created folder: Project (3bf05c5ad8574e0f858cd2261af9ef24) in parent group-v594094. [ 729.456031] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Creating folder: Instances. Parent ref: group-v594212. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 729.456271] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-59ba681e-b799-4e0e-aa3a-375e6e02e366 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.464886] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Created folder: Instances in parent group-v594212. [ 729.465205] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 729.465423] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 729.466178] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3e089a8-5f55-46c6-9df1-872569adad32 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.487722] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 729.487722] env[68217]: value = "task-2960984" [ 729.487722] env[68217]: _type = "Task" [ 729.487722] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.496297] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960984, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.735036] env[68217]: DEBUG nova.compute.manager [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 729.999360] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960984, 'name': CreateVM_Task, 'duration_secs': 0.379999} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.001850] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 730.002601] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.002769] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.003084] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 730.003705] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4fd43078-b097-4498-98c1-8410eab8fa38 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.011710] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 730.011710] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c22e5f-b08a-99fc-5c32-2ba7835f17e9" [ 730.011710] env[68217]: _type = "Task" [ 730.011710] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.022065] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c22e5f-b08a-99fc-5c32-2ba7835f17e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.136354] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 11f9c054-62b9-4ac9-9651-5c85e7a86663 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 730.136564] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 730.136760] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 7056fb29-2a2f-4275-a411-4d5f3fcb421f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 730.136833] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance af11d05f-4432-4505-bb52-226414488960 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 730.136962] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 83d32dd6-2629-4451-a746-bf5270083e2a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 730.137092] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance dfeeed37-8c84-4ecc-87ea-f4239f512fb1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 730.137220] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance db4cf157-9511-423c-aa41-433af8d92b48 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 730.137339] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance f748cf37-6605-49a2-a418-51667a0fac4a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 730.137463] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 1675982e-0702-482b-9fe6-fd4eb9d83311 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 730.137841] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance a4a88f10-937a-4fa6-aa15-eb7f669e77d0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
Skipping heal of allocation because we do not know what to do. [ 730.137841] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 58c15727-79ae-404f-a054-d71e3be498cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 730.137841] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance dc45d268-7a7f-4e65-b6fa-942ddba69b03 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 730.137992] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance b5e15801-301a-4ee6-87d2-bbf749967631 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 730.138127] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance e089c20e-b788-4e6c-9bd2-9ad485305582 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 730.138235] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance da1524a7-2756-4429-ada2-b1f493544bd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 730.138354] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 9d2b3670-ef8a-477a-b876-7a8fe37fa065 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 730.138474] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 38787c7d-a9cf-4ce6-a112-c1ec259697ca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 730.138728] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 3d03e0b7-0469-4041-a7d5-7768326eb3b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 730.180543] env[68217]: INFO nova.compute.manager [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Rebuilding instance [ 730.243468] env[68217]: DEBUG nova.compute.manager [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 730.244363] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6d3e6f2-0e90-4894-aa0a-e248950291a0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.266888] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 730.314857] env[68217]: DEBUG nova.network.neutron [req-41b3161d-f479-45e9-9166-d23607e536d1 req-f8895f9a-5d70-4281-ba43-fc3301026bfd service nova] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Updated VIF entry in instance network info cache for port b72a88d5-3b2e-461c-be4a-193dde782c4c. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 730.315222] env[68217]: DEBUG nova.network.neutron [req-41b3161d-f479-45e9-9166-d23607e536d1 req-f8895f9a-5d70-4281-ba43-fc3301026bfd service nova] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Updating instance_info_cache with network_info: [{"id": "b72a88d5-3b2e-461c-be4a-193dde782c4c", "address": "fa:16:3e:45:64:e6", "network": {"id": "d699b565-498b-4788-9c08-0e23871a9180", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-728170078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bf05c5ad8574e0f858cd2261af9ef24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d669e36a-5c9d-4fa4-92c8-90e7cb814262", "external-id": "nsx-vlan-transportzone-589", "segmentation_id": 589, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb72a88d5-3b", "ovs_interfaceid": "b72a88d5-3b2e-461c-be4a-193dde782c4c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.387368] env[68217]: DEBUG nova.objects.instance [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Lazy-loading 'flavor' on 
Instance uuid af11d05f-4432-4505-bb52-226414488960 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 730.526329] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c22e5f-b08a-99fc-5c32-2ba7835f17e9, 'name': SearchDatastore_Task, 'duration_secs': 0.012611} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.526614] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.526889] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 730.527743] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.527975] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 730.528219] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 730.528653] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0945e6db-a420-47f2-8db1-37e3eedd251a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.538225] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 730.538549] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 
tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 730.539357] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-213513f5-3faa-49f9-9b62-ed637a7a9851 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.545377] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 730.545377] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fd3c32-b975-c74e-7541-9efb6242c862" [ 730.545377] env[68217]: _type = "Task" [ 730.545377] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.554196] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fd3c32-b975-c74e-7541-9efb6242c862, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.646671] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 580e6909-7d05-447a-a378-f0b8b71f059a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 730.818656] env[68217]: DEBUG oslo_concurrency.lockutils [req-41b3161d-f479-45e9-9166-d23607e536d1 req-f8895f9a-5d70-4281-ba43-fc3301026bfd service nova] Releasing lock "refresh_cache-3d03e0b7-0469-4041-a7d5-7768326eb3b5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 730.892695] env[68217]: DEBUG oslo_concurrency.lockutils [None req-756042b9-9baa-464d-9321-bb5c3ced49f7 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Lock "af11d05f-4432-4505-bb52-226414488960" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.325s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 731.056715] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fd3c32-b975-c74e-7541-9efb6242c862, 'name': SearchDatastore_Task, 'duration_secs': 0.014727} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.057677] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0903007d-09cb-4081-96bd-aee69bea4063 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.063136] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 731.063136] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]521a2492-46dc-0c06-d19b-ef702908d150" [ 731.063136] env[68217]: _type = "Task" [ 731.063136] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.071713] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521a2492-46dc-0c06-d19b-ef702908d150, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.151305] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 71243775-e8df-4cc5-85c9-d64a244b4426 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.264714] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 731.265060] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79ca3ad1-187b-48db-9d43-fd33049d16a4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.273210] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 731.273210] env[68217]: value = "task-2960985" [ 731.273210] env[68217]: _type = "Task" [ 731.273210] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.282799] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960985, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.311472] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Acquiring lock "af11d05f-4432-4505-bb52-226414488960" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 731.311634] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Lock "af11d05f-4432-4505-bb52-226414488960" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 731.573713] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521a2492-46dc-0c06-d19b-ef702908d150, 'name': SearchDatastore_Task, 'duration_secs': 0.022695} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.574098] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 731.574264] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 3d03e0b7-0469-4041-a7d5-7768326eb3b5/3d03e0b7-0469-4041-a7d5-7768326eb3b5.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 731.574605] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2dace9fc-0754-40b1-bba5-1bc49411bf5d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.582853] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 731.582853] env[68217]: value = "task-2960986" [ 731.582853] env[68217]: _type = "Task" [ 731.582853] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.592724] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2960986, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.653835] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance d0d8ed27-003e-43e2-8a07-041420a2c758 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 731.788253] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960985, 'name': PowerOffVM_Task, 'duration_secs': 0.229593} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.788456] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 731.788717] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 731.789562] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7f3f00-68fb-4fe5-8dec-3fc30cae7ca0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.798850] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 731.799130] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3ae30724-2ed9-43ac-a0af-2c4e861e6d2d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.814406] env[68217]: INFO nova.compute.manager [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] [instance: af11d05f-4432-4505-bb52-226414488960] Detaching volume 7a0a3945-c469-4d2a-a6b4-db90a9398d63 [ 731.853645] env[68217]: INFO nova.virt.block_device [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] [instance: 
af11d05f-4432-4505-bb52-226414488960] Attempting to driver detach volume 7a0a3945-c469-4d2a-a6b4-db90a9398d63 from mountpoint /dev/sdb [ 731.853913] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] [instance: af11d05f-4432-4505-bb52-226414488960] Volume detach. Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 731.854089] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] [instance: af11d05f-4432-4505-bb52-226414488960] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594209', 'volume_id': '7a0a3945-c469-4d2a-a6b4-db90a9398d63', 'name': 'volume-7a0a3945-c469-4d2a-a6b4-db90a9398d63', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'af11d05f-4432-4505-bb52-226414488960', 'attached_at': '', 'detached_at': '', 'volume_id': '7a0a3945-c469-4d2a-a6b4-db90a9398d63', 'serial': '7a0a3945-c469-4d2a-a6b4-db90a9398d63'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 731.854965] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f3b2739-6bfa-4e7c-aeba-14c1c25ba103 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.879059] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad80eb1-4446-4d28-9164-ae644138f025 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.881917] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 731.882366] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 731.882578] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleting the datastore file [datastore1] 38787c7d-a9cf-4ce6-a112-c1ec259697ca {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 731.882821] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-be2653f4-9869-4ac6-92c2-5c41d9be559f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.889719] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc54765d-a081-411d-8f19-5f78dced3546 {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.892450] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 731.892450] env[68217]: value = "task-2960988" [ 731.892450] env[68217]: _type = "Task" [ 731.892450] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.912450] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1822b7e8-9a6b-44ef-93fc-bb79e89ddfa3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.918007] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960988, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.931295] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] The volume has not been displaced from its original location: [datastore1] volume-7a0a3945-c469-4d2a-a6b4-db90a9398d63/volume-7a0a3945-c469-4d2a-a6b4-db90a9398d63.vmdk. No consolidation needed. {{(pid=68217) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 731.936972] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] [instance: af11d05f-4432-4505-bb52-226414488960] Reconfiguring VM instance instance-0000000e to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 731.937396] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0171e7db-b2b5-4241-a267-a129e7e6214c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.957402] env[68217]: DEBUG oslo_vmware.api [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Waiting for the task: (returnval){ [ 731.957402] env[68217]: value = "task-2960989" [ 731.957402] env[68217]: _type = "Task" [ 731.957402] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.966312] env[68217]: DEBUG oslo_vmware.api [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Task: {'id': task-2960989, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.095430] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2960986, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.156613] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 7ec30097-1151-4b0d-8226-e4d34ea7b3c9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 732.404994] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960988, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.467253] env[68217]: DEBUG oslo_vmware.api [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Task: {'id': task-2960989, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.594830] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2960986, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.785902} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.596034] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 3d03e0b7-0469-4041-a7d5-7768326eb3b5/3d03e0b7-0469-4041-a7d5-7768326eb3b5.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 732.596034] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 732.596034] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-08d80dfd-2422-47c3-8688-97140067deca {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.602526] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 732.602526] env[68217]: value = "task-2960990" [ 732.602526] env[68217]: _type = "Task" [ 732.602526] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.610791] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2960990, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.660195] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 0552d616-a406-4dfa-8a70-82f39fb98bbc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 732.904267] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960988, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.561028} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.904680] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 732.904947] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 732.905197] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 732.967422] env[68217]: DEBUG oslo_vmware.api [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Task: {'id': task-2960989, 'name': ReconfigVM_Task, 'duration_secs': 0.529271} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.968339] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] [instance: af11d05f-4432-4505-bb52-226414488960] Reconfigured VM instance instance-0000000e to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 732.973473] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4183a89f-6f4e-496e-a189-efa699d1c6ff {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.989766] env[68217]: DEBUG oslo_vmware.api [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Waiting for the task: (returnval){ [ 732.989766] env[68217]: value = "task-2960991" [ 732.989766] env[68217]: _type = "Task" [ 732.989766] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.999045] env[68217]: DEBUG oslo_vmware.api [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Task: {'id': task-2960991, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.112105] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2960990, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.107824} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.112386] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 733.113161] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d99ae77e-cff9-412d-a9c0-973d07bfa33e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.134969] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Reconfiguring VM instance instance-00000027 to attach disk [datastore1] 3d03e0b7-0469-4041-a7d5-7768326eb3b5/3d03e0b7-0469-4041-a7d5-7768326eb3b5.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 733.136324] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f1ec331b-fc4d-41e2-956c-c7be99146bfa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.152102] env[68217]: DEBUG nova.objects.instance [None req-ea7139b3-8653-47c0-bb88-e0dcde90c0c0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Lazy-loading 'flavor' on Instance uuid e089c20e-b788-4e6c-9bd2-9ad485305582 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 733.158844] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 733.158844] env[68217]: value = "task-2960992" [ 733.158844] env[68217]: _type = "Task" [ 733.158844] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.163088] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance d14026b1-84dd-430e-be94-94dcb1f47473 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 733.172060] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2960992, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.502561] env[68217]: DEBUG oslo_vmware.api [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Task: {'id': task-2960991, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.657869] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ea7139b3-8653-47c0-bb88-e0dcde90c0c0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Acquiring lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.658082] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ea7139b3-8653-47c0-bb88-e0dcde90c0c0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Acquired lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 733.666043] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 156ea1ad-6e52-4848-915d-7ba74c606e6e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 733.670160] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2960992, 'name': ReconfigVM_Task, 'duration_secs': 0.347309} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.670684] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Reconfigured VM instance instance-00000027 to attach disk [datastore1] 3d03e0b7-0469-4041-a7d5-7768326eb3b5/3d03e0b7-0469-4041-a7d5-7768326eb3b5.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 733.671346] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-58589bf4-455d-4c39-8b87-1d1bfddbd0c2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.679115] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 733.679115] env[68217]: value = "task-2960993" [ 733.679115] env[68217]: _type = "Task" [ 733.679115] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.690108] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2960993, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.945489] env[68217]: DEBUG nova.virt.hardware [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 733.945810] env[68217]: DEBUG nova.virt.hardware [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 733.945810] env[68217]: DEBUG nova.virt.hardware [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 733.946108] env[68217]: DEBUG nova.virt.hardware [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 733.946284] env[68217]: DEBUG nova.virt.hardware [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 733.946675] env[68217]: DEBUG nova.virt.hardware [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 733.946997] env[68217]: DEBUG nova.virt.hardware [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:573}} [ 733.947189] env[68217]: DEBUG nova.virt.hardware [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 733.947365] env[68217]: DEBUG nova.virt.hardware [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 733.947526] env[68217]: DEBUG nova.virt.hardware [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 733.947703] env[68217]: DEBUG nova.virt.hardware [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 733.950549] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73cf9016-addc-4b57-83b5-7b27c31e0e62 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.959934] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0127a3c8-cf7c-45cf-aa73-4ebde08817a9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.976727] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:96:d3:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b1f3e6c3-5584-4852-9017-476ab8ac4946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c390985b-4383-44b2-881a-57ed086930c0', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 733.985336] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 733.985623] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 733.985853] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9000ce7-d608-4cf4-a119-c7e0f932d838 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.013182] env[68217]: DEBUG oslo_vmware.api [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Task: {'id': task-2960991, 'name': ReconfigVM_Task, 'duration_secs': 0.586652} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.014483] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] [instance: af11d05f-4432-4505-bb52-226414488960] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594209', 'volume_id': '7a0a3945-c469-4d2a-a6b4-db90a9398d63', 'name': 'volume-7a0a3945-c469-4d2a-a6b4-db90a9398d63', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'af11d05f-4432-4505-bb52-226414488960', 'attached_at': '', 'detached_at': '', 'volume_id': '7a0a3945-c469-4d2a-a6b4-db90a9398d63', 'serial': '7a0a3945-c469-4d2a-a6b4-db90a9398d63'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 734.016662] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 734.016662] env[68217]: value = "task-2960994" [ 734.016662] env[68217]: _type = "Task" [ 734.016662] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.024068] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "bd62c682-24f2-4559-887a-03186409f699" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 734.024311] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "bd62c682-24f2-4559-887a-03186409f699" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 734.030431] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960994, 'name': CreateVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.171429] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 7584180b-efa6-4038-9f3a-619ab7937553 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 734.191314] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2960993, 'name': Rename_Task, 'duration_secs': 0.285308} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.191780] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 734.192011] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6dc23030-539b-4645-8710-984e2c97754b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.199009] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 734.199009] env[68217]: value = "task-2960995" [ 734.199009] env[68217]: _type = "Task" [ 734.199009] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 734.208426] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2960995, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.498970] env[68217]: DEBUG nova.network.neutron [None req-ea7139b3-8653-47c0-bb88-e0dcde90c0c0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 734.534859] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960994, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 734.582655] env[68217]: DEBUG nova.objects.instance [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Lazy-loading 'flavor' on Instance uuid af11d05f-4432-4505-bb52-226414488960 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 734.586270] env[68217]: DEBUG nova.compute.manager [req-279b8924-f933-4165-9512-f45755279e09 req-b6ca1352-151b-4bb5-96d6-1b62358ee21f service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Received event network-changed-e1840648-4f87-4974-bc63-bd2b25acab29 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 734.586503] env[68217]: DEBUG nova.compute.manager [req-279b8924-f933-4165-9512-f45755279e09 req-b6ca1352-151b-4bb5-96d6-1b62358ee21f service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Refreshing instance network info cache due to event network-changed-e1840648-4f87-4974-bc63-bd2b25acab29. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 734.586728] env[68217]: DEBUG oslo_concurrency.lockutils [req-279b8924-f933-4165-9512-f45755279e09 req-b6ca1352-151b-4bb5-96d6-1b62358ee21f service nova] Acquiring lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 734.680711] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 734.710580] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2960995, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.029168] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2960994, 'name': CreateVM_Task, 'duration_secs': 0.529649} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.029622] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 735.029992] env[68217]: DEBUG oslo_concurrency.lockutils [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.030142] env[68217]: DEBUG oslo_concurrency.lockutils [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.030466] env[68217]: DEBUG oslo_concurrency.lockutils [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 735.030744] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbd07a6f-8779-4b35-8b61-60991bb0234d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.035742] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 735.035742] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5287fd49-21fb-79ca-8b34-1a055b1ca76d" [ 735.035742] env[68217]: _type = "Task" [ 735.035742] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.046790] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5287fd49-21fb-79ca-8b34-1a055b1ca76d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.184389] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 693d6a74-a671-4d02-8798-cd3975507428 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 735.211499] env[68217]: DEBUG oslo_vmware.api [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2960995, 'name': PowerOnVM_Task, 'duration_secs': 0.52225} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.211826] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 735.212038] env[68217]: INFO nova.compute.manager [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Took 8.81 seconds to spawn the instance on the hypervisor. [ 735.212225] env[68217]: DEBUG nova.compute.manager [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 735.213238] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7ce63b-736b-42a1-8f1d-4e9365d1801a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.282872] env[68217]: DEBUG nova.network.neutron [None req-ea7139b3-8653-47c0-bb88-e0dcde90c0c0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Updating instance_info_cache with network_info: [{"id": "e1840648-4f87-4974-bc63-bd2b25acab29", "address": "fa:16:3e:d3:ca:e4", "network": {"id": "72cf5b88-0408-457a-9587-2b78b9e3bafb", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1678018221-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6cd1165637a44528a61171aef40a553", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1840648-4f", "ovs_interfaceid": "e1840648-4f87-4974-bc63-bd2b25acab29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 735.547592] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5287fd49-21fb-79ca-8b34-1a055b1ca76d, 'name': SearchDatastore_Task, 'duration_secs': 0.016157} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 735.547961] env[68217]: DEBUG oslo_concurrency.lockutils [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.548236] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 735.548487] env[68217]: DEBUG oslo_concurrency.lockutils [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 735.548633] env[68217]: DEBUG oslo_concurrency.lockutils [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.548818] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 735.549136] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22f9f596-c4e2-437f-96f2-4675e2551d41 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.558532] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 735.558754] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 735.559524] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ad641122-71b6-4575-a9cd-6d65912ccb9c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.564383] env[68217]: DEBUG nova.objects.instance [None req-d7a6eac7-bfe2-4b0e-b857-0f884b87557f tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Lazy-loading 'flavor' on Instance uuid e089c20e-b788-4e6c-9bd2-9ad485305582 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 735.569243] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 735.569243] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52224c8d-0d9f-1e6a-5492-558b56274cd9" [ 735.569243] env[68217]: _type = "Task" [ 735.569243] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.577547] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52224c8d-0d9f-1e6a-5492-558b56274cd9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 735.594128] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32744564-bb8a-41b9-ac49-d44dfeb0ba67 tempest-VolumesAssistedSnapshotsTest-1843694040 tempest-VolumesAssistedSnapshotsTest-1843694040-project-admin] Lock "af11d05f-4432-4505-bb52-226414488960" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.282s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 735.687602] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 735.734154] env[68217]: INFO nova.compute.manager [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Took 55.09 seconds to build instance. 
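The repeated `Waiting for the task: (returnval){ ... } to complete` blocks and the `Task: {'id': task-..., 'name': ...} progress is N%.` lines above come from the driver polling each vCenter task (CreateVM_Task, Rename_Task, PowerOnVM_Task, SearchDatastore_Task, ...) until it reports success. A minimal sketch of that polling pattern is shown below; it is an illustration only, not the oslo.vmware implementation, and `TaskInfo`, `get_task_info`, and the polling interval are hypothetical stand-ins for the task state the real session reads from vCenter.

```python
import time
from dataclasses import dataclass
from typing import Callable, Optional

# Hypothetical stand-in for the vCenter task info the driver polls;
# the real driver obtains this through oslo.vmware session calls.
@dataclass
class TaskInfo:
    state: str                 # "queued", "running", "success", or "error"
    progress: int              # 0-100, as seen in the "progress is N%" log lines
    error: Optional[str] = None

def wait_for_task(get_task_info: Callable[[], TaskInfo],
                  interval: float = 0.5) -> TaskInfo:
    """Poll a task until it finishes, mirroring the wait_for_task /
    _poll_task pairs in the log: report progress while running,
    return on success, raise on error."""
    while True:
        info = get_task_info()
        if info.state in ("queued", "running"):
            print(f"Task progress is {info.progress}%.")   # cf. _poll_task lines
        elif info.state == "success":
            print("Task completed successfully.")
            return info
        else:
            raise RuntimeError(f"Task failed: {info.error}")
        time.sleep(interval)

# Example: a fake task that finishes on the third poll.
if __name__ == "__main__":
    states = iter([TaskInfo("running", 6), TaskInfo("running", 99),
                   TaskInfo("success", 100)])
    wait_for_task(lambda: next(states), interval=0.0)
```

In the log this loop shows up as one "Waiting for the task" entry followed by one or more progress entries and a final "completed successfully" entry with the measured `duration_secs`.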
[ 735.783966] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ea7139b3-8653-47c0-bb88-e0dcde90c0c0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Releasing lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 735.784235] env[68217]: DEBUG nova.compute.manager [None req-ea7139b3-8653-47c0-bb88-e0dcde90c0c0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Inject network info {{(pid=68217) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 735.784546] env[68217]: DEBUG nova.compute.manager [None req-ea7139b3-8653-47c0-bb88-e0dcde90c0c0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] network_info to inject: |[{"id": "e1840648-4f87-4974-bc63-bd2b25acab29", "address": "fa:16:3e:d3:ca:e4", "network": {"id": "72cf5b88-0408-457a-9587-2b78b9e3bafb", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1678018221-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6cd1165637a44528a61171aef40a553", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1840648-4f", "ovs_interfaceid": "e1840648-4f87-4974-bc63-bd2b25acab29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 735.791203] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ea7139b3-8653-47c0-bb88-e0dcde90c0c0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Reconfiguring VM instance to set the machine id {{(pid=68217) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 735.791487] env[68217]: DEBUG oslo_concurrency.lockutils [req-279b8924-f933-4165-9512-f45755279e09 req-b6ca1352-151b-4bb5-96d6-1b62358ee21f service nova] Acquired lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 735.791736] env[68217]: DEBUG nova.network.neutron [req-279b8924-f933-4165-9512-f45755279e09 req-b6ca1352-151b-4bb5-96d6-1b62358ee21f service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Refreshing network info cache for port e1840648-4f87-4974-bc63-bd2b25acab29 {{(pid=68217) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2064}} [ 735.792959] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ee45883-136c-424e-982d-1f0ed242f410 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 735.813288] env[68217]: DEBUG oslo_vmware.api [None req-ea7139b3-8653-47c0-bb88-e0dcde90c0c0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Waiting for the task: (returnval){ [ 735.813288] env[68217]: value = "task-2960997" [ 735.813288] env[68217]: _type = "Task" [ 735.813288] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 735.823185] env[68217]: DEBUG oslo_vmware.api [None req-ea7139b3-8653-47c0-bb88-e0dcde90c0c0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2960997, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.070181] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d7a6eac7-bfe2-4b0e-b857-0f884b87557f tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Acquiring lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.081173] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52224c8d-0d9f-1e6a-5492-558b56274cd9, 'name': SearchDatastore_Task, 'duration_secs': 0.013546} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.084876] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b687f62d-4c3e-40f1-b5dc-a8d595607dd9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.092293] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 736.092293] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529fef96-2267-8020-b630-cf1fda40e24d" [ 736.092293] env[68217]: _type = "Task" [ 736.092293] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.100253] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529fef96-2267-8020-b630-cf1fda40e24d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.120947] env[68217]: DEBUG oslo_vmware.rw_handles [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527c3f1e-6f98-18b6-4e76-745fe1935ca0/disk-0.vmdk. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 736.121814] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d442794f-a278-45af-9485-b31f9a5a253d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.128561] env[68217]: DEBUG oslo_vmware.rw_handles [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527c3f1e-6f98-18b6-4e76-745fe1935ca0/disk-0.vmdk is in state: ready. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 736.128753] env[68217]: ERROR oslo_vmware.rw_handles [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527c3f1e-6f98-18b6-4e76-745fe1935ca0/disk-0.vmdk due to incomplete transfer. [ 736.128974] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-5755474c-7c15-4ac2-b4b7-681eb77bb95a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.138820] env[68217]: DEBUG oslo_vmware.rw_handles [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527c3f1e-6f98-18b6-4e76-745fe1935ca0/disk-0.vmdk. 
{{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 736.139467] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Uploaded image 2f498595-328c-49f2-8657-1cc0a8595983 to the Glance image server {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 736.141413] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 736.141713] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-886c1bc6-1b2b-4366-acfc-7f218e2e82a3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.147892] env[68217]: DEBUG oslo_vmware.api [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 736.147892] env[68217]: value = "task-2960998" [ 736.147892] env[68217]: _type = "Task" [ 736.147892] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.155571] env[68217]: DEBUG oslo_vmware.api [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960998, 'name': Destroy_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.173025] env[68217]: DEBUG nova.network.neutron [req-279b8924-f933-4165-9512-f45755279e09 req-b6ca1352-151b-4bb5-96d6-1b62358ee21f service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Updated VIF entry in instance network info cache for port e1840648-4f87-4974-bc63-bd2b25acab29. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 736.173399] env[68217]: DEBUG nova.network.neutron [req-279b8924-f933-4165-9512-f45755279e09 req-b6ca1352-151b-4bb5-96d6-1b62358ee21f service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Updating instance_info_cache with network_info: [{"id": "e1840648-4f87-4974-bc63-bd2b25acab29", "address": "fa:16:3e:d3:ca:e4", "network": {"id": "72cf5b88-0408-457a-9587-2b78b9e3bafb", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1678018221-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}, {"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6cd1165637a44528a61171aef40a553", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1840648-4f", "ovs_interfaceid": "e1840648-4f87-4974-bc63-bd2b25acab29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 736.191537] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance e550084b-84dd-4ae8-8667-2edb45b49e2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 736.191808] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 736.191923] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 736.235667] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d6fdda11-1326-43d7-81fc-215b0df1c007 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "3d03e0b7-0469-4041-a7d5-7768326eb3b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 108.486s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 736.325306] env[68217]: DEBUG oslo_vmware.api [None req-ea7139b3-8653-47c0-bb88-e0dcde90c0c0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2960997, 'name': ReconfigVM_Task, 'duration_secs': 0.195249} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.325613] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ea7139b3-8653-47c0-bb88-e0dcde90c0c0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Reconfigured VM instance to set the machine id {{(pid=68217) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 736.602729] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529fef96-2267-8020-b630-cf1fda40e24d, 'name': SearchDatastore_Task, 'duration_secs': 0.011853} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.603008] env[68217]: DEBUG oslo_concurrency.lockutils [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.603285] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 38787c7d-a9cf-4ce6-a112-c1ec259697ca/38787c7d-a9cf-4ce6-a112-c1ec259697ca.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 736.603545] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3147a954-3d85-4fb3-bf07-f06a4ccf4586 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.606715] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d29819-a980-4e3f-ad66-eb1e761d4175 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.615838] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92acad4b-9bb9-4593-b2fb-842dcd6c0ec5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.619697] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 736.619697] env[68217]: value = "task-2960999" [ 736.619697] env[68217]: _type = "Task" [ 736.619697] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.660606] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75618a54-6baa-4255-929a-37ca0757b113 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.666455] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960999, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.671302] env[68217]: DEBUG oslo_vmware.api [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2960998, 'name': Destroy_Task, 'duration_secs': 0.369743} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 736.673526] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Destroyed the VM [ 736.673785] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 736.674088] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6bc9c17c-cb0d-48d8-9616-09834b4fbc5f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.677178] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f3edc4-b10c-4670-8501-0052b079c6d2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.681426] env[68217]: DEBUG oslo_concurrency.lockutils [req-279b8924-f933-4165-9512-f45755279e09 req-b6ca1352-151b-4bb5-96d6-1b62358ee21f service nova] Releasing lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 736.681844] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d7a6eac7-bfe2-4b0e-b857-0f884b87557f tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Acquired lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 736.695021] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.697733] env[68217]: DEBUG oslo_vmware.api [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 736.697733] env[68217]: value = "task-2961000" [ 736.697733] env[68217]: _type = "Task" [ 736.697733] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 736.705996] env[68217]: DEBUG oslo_vmware.api [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961000, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 736.741366] env[68217]: DEBUG nova.compute.manager [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 737.270481] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 737.298584] env[68217]: DEBUG oslo_vmware.api [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961000, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.298584] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2960999, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.517803} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.298584] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 38787c7d-a9cf-4ce6-a112-c1ec259697ca/38787c7d-a9cf-4ce6-a112-c1ec259697ca.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 737.298864] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 737.299448] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c17d1b0f-6ab7-41e3-a38d-d74bed3dffe5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.302465] env[68217]: DEBUG oslo_concurrency.lockutils [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.308784] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 737.308784] env[68217]: value = "task-2961001" [ 737.308784] env[68217]: _type = "Task" [ 737.308784] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.325690] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961001, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.380175] env[68217]: DEBUG nova.network.neutron [None req-d7a6eac7-bfe2-4b0e-b857-0f884b87557f tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 737.586528] env[68217]: DEBUG nova.compute.manager [req-df698372-62cf-44a1-9867-2c5ccf7ff8e1 req-bb3bd06d-4fb6-4054-93e9-5e37d5901388 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Received event network-changed-e1840648-4f87-4974-bc63-bd2b25acab29 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 737.586723] env[68217]: DEBUG nova.compute.manager [req-df698372-62cf-44a1-9867-2c5ccf7ff8e1 req-bb3bd06d-4fb6-4054-93e9-5e37d5901388 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Refreshing instance network info cache due to event network-changed-e1840648-4f87-4974-bc63-bd2b25acab29. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 737.586910] env[68217]: DEBUG oslo_concurrency.lockutils [req-df698372-62cf-44a1-9867-2c5ccf7ff8e1 req-bb3bd06d-4fb6-4054-93e9-5e37d5901388 service nova] Acquiring lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.778973] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68217) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 737.779219] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.197s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.779484] env[68217]: DEBUG oslo_concurrency.lockutils [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 48.567s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.781018] env[68217]: INFO nova.compute.claims [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 737.790049] env[68217]: DEBUG oslo_vmware.api [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961000, 
'name': RemoveSnapshot_Task, 'duration_secs': 0.68066} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.790049] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 737.790049] env[68217]: INFO nova.compute.manager [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Took 14.07 seconds to snapshot the instance on the hypervisor. [ 737.817781] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961001, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074588} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 737.819089] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 737.819867] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddb7183c-ff39-4bee-9a2f-de7fc05ed30e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.845492] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Reconfiguring VM instance instance-00000026 to attach disk [datastore1] 38787c7d-a9cf-4ce6-a112-c1ec259697ca/38787c7d-a9cf-4ce6-a112-c1ec259697ca.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 737.846309] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-981d94ca-9d12-4bbe-b627-3b83b6dd36ae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.865352] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 737.865352] env[68217]: value = "task-2961002" [ 737.865352] env[68217]: _type = "Task" [ 737.865352] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 737.873293] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961002, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 737.980049] env[68217]: DEBUG oslo_concurrency.lockutils [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "9d2b3670-ef8a-477a-b876-7a8fe37fa065" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.980301] env[68217]: DEBUG oslo_concurrency.lockutils [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "9d2b3670-ef8a-477a-b876-7a8fe37fa065" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.980509] env[68217]: DEBUG oslo_concurrency.lockutils [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "9d2b3670-ef8a-477a-b876-7a8fe37fa065-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 737.980716] env[68217]: DEBUG oslo_concurrency.lockutils [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "9d2b3670-ef8a-477a-b876-7a8fe37fa065-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 737.980890] env[68217]: DEBUG oslo_concurrency.lockutils [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "9d2b3670-ef8a-477a-b876-7a8fe37fa065-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 737.985890] env[68217]: INFO nova.compute.manager [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Terminating instance [ 738.221163] env[68217]: DEBUG nova.network.neutron [None req-d7a6eac7-bfe2-4b0e-b857-0f884b87557f tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Updating instance_info_cache with network_info: [{"id": "e1840648-4f87-4974-bc63-bd2b25acab29", "address": "fa:16:3e:d3:ca:e4", "network": {"id": "72cf5b88-0408-457a-9587-2b78b9e3bafb", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1678018221-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": 
false, "tenant_id": "a6cd1165637a44528a61171aef40a553", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1840648-4f", "ovs_interfaceid": "e1840648-4f87-4974-bc63-bd2b25acab29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.295014] env[68217]: DEBUG nova.compute.manager [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Instance disappeared during snapshot {{(pid=68217) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 738.306290] env[68217]: DEBUG nova.compute.manager [None req-3ca63b75-8beb-4985-a907-f49bb56d8d66 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Image not found during clean up 2f498595-328c-49f2-8657-1cc0a8595983 {{(pid=68217) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 738.377899] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961002, 'name': ReconfigVM_Task, 'duration_secs': 0.298861} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.378260] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Reconfigured VM instance instance-00000026 to attach disk [datastore1] 38787c7d-a9cf-4ce6-a112-c1ec259697ca/38787c7d-a9cf-4ce6-a112-c1ec259697ca.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 738.379388] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-98f4f343-46e2-46e9-ad27-39837122a23b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.388409] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 738.388409] env[68217]: value = "task-2961003" [ 738.388409] env[68217]: _type = "Task" [ 738.388409] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.399033] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961003, 'name': Rename_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.492224] env[68217]: DEBUG nova.compute.manager [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 738.492224] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 738.492562] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47b90e72-a788-4e15-91ff-adb24643ad55 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.500124] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 738.500392] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-03730dda-3060-49aa-8d1c-5ee14eaf5f75 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.568134] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 738.568883] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 738.568883] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Deleting the datastore file [datastore1] 9d2b3670-ef8a-477a-b876-7a8fe37fa065 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 738.569036] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-50252732-35b0-44ea-9047-95de00ffdae3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.577952] env[68217]: DEBUG oslo_vmware.api [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 738.577952] env[68217]: value = "task-2961005" [ 738.577952] env[68217]: _type = "Task" [ 738.577952] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.586614] env[68217]: DEBUG oslo_vmware.api [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961005, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.724029] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d7a6eac7-bfe2-4b0e-b857-0f884b87557f tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Releasing lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 738.724813] env[68217]: DEBUG nova.compute.manager [None req-d7a6eac7-bfe2-4b0e-b857-0f884b87557f tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Inject network info {{(pid=68217) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 738.724813] env[68217]: DEBUG nova.compute.manager [None req-d7a6eac7-bfe2-4b0e-b857-0f884b87557f tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] network_info to inject: |[{"id": "e1840648-4f87-4974-bc63-bd2b25acab29", "address": "fa:16:3e:d3:ca:e4", "network": {"id": "72cf5b88-0408-457a-9587-2b78b9e3bafb", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1678018221-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6cd1165637a44528a61171aef40a553", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1840648-4f", "ovs_interfaceid": "e1840648-4f87-4974-bc63-bd2b25acab29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 738.729535] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d7a6eac7-bfe2-4b0e-b857-0f884b87557f tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Reconfiguring VM instance to set the machine id {{(pid=68217) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 738.729777] env[68217]: DEBUG oslo_concurrency.lockutils [req-df698372-62cf-44a1-9867-2c5ccf7ff8e1 req-bb3bd06d-4fb6-4054-93e9-5e37d5901388 service nova] Acquired lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 738.729861] env[68217]: DEBUG nova.network.neutron [req-df698372-62cf-44a1-9867-2c5ccf7ff8e1 req-bb3bd06d-4fb6-4054-93e9-5e37d5901388 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Refreshing network info cache for port e1840648-4f87-4974-bc63-bd2b25acab29 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 738.731636] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86898fe0-cd0c-4b15-b60c-a5e46480b7f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.748928] env[68217]: DEBUG oslo_vmware.api [None req-d7a6eac7-bfe2-4b0e-b857-0f884b87557f tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Waiting for the task: (returnval){ [ 738.748928] env[68217]: value = "task-2961006" [ 738.748928] env[68217]: _type = "Task" [ 738.748928] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.761086] env[68217]: DEBUG oslo_vmware.api [None req-d7a6eac7-bfe2-4b0e-b857-0f884b87557f tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2961006, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 738.799685] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "41d279f2-477b-44b2-9eb9-7b782c9c890f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 738.800031] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "41d279f2-477b-44b2-9eb9-7b782c9c890f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 738.902438] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961003, 'name': Rename_Task, 'duration_secs': 0.149345} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 738.903374] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 738.903374] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72240b1b-f34b-46ad-9db0-868dee8f08c3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.912997] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 738.912997] env[68217]: value = "task-2961007" [ 738.912997] env[68217]: _type = "Task" [ 738.912997] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 738.921339] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961007, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.089688] env[68217]: DEBUG oslo_vmware.api [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961005, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145406} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.089889] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 739.090296] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 739.090296] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 739.090368] env[68217]: INFO nova.compute.manager [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 739.090650] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 739.090839] env[68217]: DEBUG nova.compute.manager [-] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 739.090927] env[68217]: DEBUG nova.network.neutron [-] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 739.164243] env[68217]: DEBUG oslo_concurrency.lockutils [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Acquiring lock "af11d05f-4432-4505-bb52-226414488960" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.164793] env[68217]: DEBUG oslo_concurrency.lockutils [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Lock "af11d05f-4432-4505-bb52-226414488960" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.164793] env[68217]: DEBUG oslo_concurrency.lockutils [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Acquiring lock "af11d05f-4432-4505-bb52-226414488960-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.164967] env[68217]: DEBUG oslo_concurrency.lockutils [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Lock "af11d05f-4432-4505-bb52-226414488960-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.165085] env[68217]: DEBUG oslo_concurrency.lockutils [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Lock "af11d05f-4432-4505-bb52-226414488960-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.169773] env[68217]: INFO nova.compute.manager [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Terminating instance [ 739.221559] 
env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7767885-d6ab-4851-81a1-a56cf0010765 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.229177] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d02970b2-98c2-4960-9843-7b8005f7422a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.268681] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd821601-b4a9-425c-bf29-7966bbe962e7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.285505] env[68217]: DEBUG oslo_vmware.api [None req-d7a6eac7-bfe2-4b0e-b857-0f884b87557f tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2961006, 'name': ReconfigVM_Task, 'duration_secs': 0.205528} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.287316] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-306e9e07-1d93-431d-bdd8-0b28f26f4065 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.291750] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d7a6eac7-bfe2-4b0e-b857-0f884b87557f tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Reconfigured VM instance to set the machine id {{(pid=68217) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 739.305237] env[68217]: DEBUG nova.compute.provider_tree [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 739.425290] env[68217]: DEBUG oslo_vmware.api [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961007, 'name': PowerOnVM_Task, 'duration_secs': 0.462005} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 739.425564] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 739.426185] env[68217]: DEBUG nova.compute.manager [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 739.426737] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-139e4f5d-2aa4-45bf-80a3-9824f8c12678 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.504229] env[68217]: DEBUG nova.network.neutron [req-df698372-62cf-44a1-9867-2c5ccf7ff8e1 req-bb3bd06d-4fb6-4054-93e9-5e37d5901388 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Updated VIF entry in instance network info cache for port e1840648-4f87-4974-bc63-bd2b25acab29. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 739.504593] env[68217]: DEBUG nova.network.neutron [req-df698372-62cf-44a1-9867-2c5ccf7ff8e1 req-bb3bd06d-4fb6-4054-93e9-5e37d5901388 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Updating instance_info_cache with network_info: [{"id": "e1840648-4f87-4974-bc63-bd2b25acab29", "address": "fa:16:3e:d3:ca:e4", "network": {"id": "72cf5b88-0408-457a-9587-2b78b9e3bafb", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1678018221-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.138", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a6cd1165637a44528a61171aef40a553", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "90c863af-25e3-4fc6-a125-8baa7540298c", "external-id": "nsx-vlan-transportzone-50", "segmentation_id": 50, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape1840648-4f", "ovs_interfaceid": "e1840648-4f87-4974-bc63-bd2b25acab29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.524275] env[68217]: DEBUG nova.compute.manager [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 739.525427] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-059d0126-e610-4a7e-8f5d-aa0aef64bdeb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.630960] env[68217]: DEBUG nova.compute.manager [req-67f183a7-3a3d-4e9f-832e-4764f5a184b4 req-4cbfcec4-7b50-40a6-8f62-fb5365367e91 service nova] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Received event network-vif-deleted-1f12aa8d-6df1-467f-b0b7-9994d13446cb {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 739.631174] env[68217]: INFO nova.compute.manager [req-67f183a7-3a3d-4e9f-832e-4764f5a184b4 req-4cbfcec4-7b50-40a6-8f62-fb5365367e91 service nova] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Neutron deleted interface 1f12aa8d-6df1-467f-b0b7-9994d13446cb; detaching it from the instance and deleting it from the info cache [ 739.631373] env[68217]: DEBUG nova.network.neutron [req-67f183a7-3a3d-4e9f-832e-4764f5a184b4 req-4cbfcec4-7b50-40a6-8f62-fb5365367e91 service nova] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.673366] env[68217]: DEBUG nova.compute.manager [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 739.673581] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 739.674465] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe030882-c9f7-440f-8bc7-f85bd7d6ab63 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.682419] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 739.682656] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-206fd7b6-9bd0-4afa-808a-f87bb90a959d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.688595] env[68217]: DEBUG oslo_vmware.api [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Waiting for the task: (returnval){ [ 739.688595] env[68217]: value = "task-2961008" [ 739.688595] env[68217]: _type = "Task" [ 739.688595] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 739.696424] env[68217]: DEBUG oslo_vmware.api [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': task-2961008, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 739.778089] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Acquiring lock "e089c20e-b788-4e6c-9bd2-9ad485305582" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.778383] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Lock "e089c20e-b788-4e6c-9bd2-9ad485305582" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.778611] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Acquiring lock "e089c20e-b788-4e6c-9bd2-9ad485305582-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.778791] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Lock "e089c20e-b788-4e6c-9bd2-9ad485305582-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 739.778956] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Lock "e089c20e-b788-4e6c-9bd2-9ad485305582-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 739.781315] env[68217]: INFO nova.compute.manager [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Terminating instance [ 739.810306] env[68217]: DEBUG nova.scheduler.client.report [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 739.947266] env[68217]: DEBUG oslo_concurrency.lockutils [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 739.959765] env[68217]: DEBUG nova.network.neutron [-] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.007112] env[68217]: DEBUG oslo_concurrency.lockutils [req-df698372-62cf-44a1-9867-2c5ccf7ff8e1 req-bb3bd06d-4fb6-4054-93e9-5e37d5901388 service nova] Releasing lock "refresh_cache-e089c20e-b788-4e6c-9bd2-9ad485305582" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 740.041210] env[68217]: INFO nova.compute.manager [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] instance snapshotting [ 740.044039] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb0ae960-6b91-4f4f-9a64-cdae225ccf9e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.063009] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71fa5ac0-4a8f-44bd-b070-7a1cb9743aa8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.134412] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-429542d5-f270-47cd-995e-6041ad349e52 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.144478] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5826a00b-1057-443f-a2e3-c1f2faaf654b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.174995] env[68217]: DEBUG nova.compute.manager [req-67f183a7-3a3d-4e9f-832e-4764f5a184b4 req-4cbfcec4-7b50-40a6-8f62-fb5365367e91 service nova] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Detach interface failed, port_id=1f12aa8d-6df1-467f-b0b7-9994d13446cb, reason: Instance 9d2b3670-ef8a-477a-b876-7a8fe37fa065 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 740.197573] env[68217]: DEBUG oslo_vmware.api [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': task-2961008, 'name': PowerOffVM_Task, 'duration_secs': 0.198467} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.197853] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 740.198044] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 740.198288] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca68a6dd-4706-4d59-b597-93b49e1483ba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.264484] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 740.264685] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 740.264906] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Deleting the datastore file [datastore1] af11d05f-4432-4505-bb52-226414488960 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 740.265197] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd5b587e-2708-4512-a4d7-2878c10ff1fd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.271714] env[68217]: DEBUG oslo_vmware.api [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Waiting for the task: (returnval){ [ 740.271714] env[68217]: value = "task-2961010" [ 740.271714] env[68217]: _type = "Task" [ 740.271714] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.279423] env[68217]: DEBUG oslo_vmware.api [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': task-2961010, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.285117] env[68217]: DEBUG nova.compute.manager [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 740.285327] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 740.286104] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f70450c-38a5-4a78-919a-11cc2fcd05b5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.293139] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 740.293401] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cfe6cf08-d403-41a0-b48c-385630e0b179 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.299215] env[68217]: DEBUG oslo_vmware.api [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Waiting for the task: (returnval){ [ 740.299215] env[68217]: value = "task-2961011" [ 740.299215] env[68217]: _type = "Task" [ 740.299215] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.307926] env[68217]: DEBUG oslo_vmware.api [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2961011, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.316982] env[68217]: DEBUG oslo_concurrency.lockutils [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.537s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.317583] env[68217]: DEBUG nova.compute.manager [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 740.320730] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.695s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.320954] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.323063] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 47.977s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 740.324512] env[68217]: INFO nova.compute.claims [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 740.350188] env[68217]: INFO nova.scheduler.client.report [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleted allocations for instance 1675982e-0702-482b-9fe6-fd4eb9d83311 [ 740.463399] env[68217]: INFO nova.compute.manager [-] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Took 1.37 seconds to deallocate network for instance. [ 740.576132] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 740.576132] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-82dd9f97-0b09-4c15-853a-0726bd89fcc8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.581839] env[68217]: DEBUG oslo_vmware.api [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 740.581839] env[68217]: value = "task-2961012" [ 740.581839] env[68217]: _type = "Task" [ 740.581839] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.589562] env[68217]: DEBUG oslo_vmware.api [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961012, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.782949] env[68217]: DEBUG oslo_vmware.api [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Task: {'id': task-2961010, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167963} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.783242] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 740.783375] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 740.784117] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 740.784217] env[68217]: INFO nova.compute.manager [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] [instance: af11d05f-4432-4505-bb52-226414488960] Took 1.11 seconds to destroy the instance on the hypervisor. [ 740.784405] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 740.784596] env[68217]: DEBUG nova.compute.manager [-] [instance: af11d05f-4432-4505-bb52-226414488960] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 740.784694] env[68217]: DEBUG nova.network.neutron [-] [instance: af11d05f-4432-4505-bb52-226414488960] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 740.808911] env[68217]: DEBUG oslo_vmware.api [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2961011, 'name': PowerOffVM_Task, 'duration_secs': 0.228322} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 740.808911] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 740.808911] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 740.809137] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6add1dca-1b91-484d-bf00-2bfa82f49f5b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.829200] env[68217]: DEBUG nova.compute.utils [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 740.830671] env[68217]: DEBUG nova.compute.manager [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 740.830865] env[68217]: DEBUG nova.network.neutron [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 740.858360] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a6c9b34f-a55f-4336-9dec-6a05e974b7a0 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "1675982e-0702-482b-9fe6-fd4eb9d83311" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 52.690s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 740.875150] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 740.875150] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 740.875150] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 
tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Deleting the datastore file [datastore2] e089c20e-b788-4e6c-9bd2-9ad485305582 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 740.875672] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6abe6207-a5ea-4b3a-9c40-25a43da7fd07 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.882132] env[68217]: DEBUG oslo_vmware.api [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Waiting for the task: (returnval){ [ 740.882132] env[68217]: value = "task-2961014" [ 740.882132] env[68217]: _type = "Task" [ 740.882132] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 740.894963] env[68217]: DEBUG oslo_vmware.api [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2961014, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 740.896994] env[68217]: DEBUG nova.policy [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9172578aec2742bb9aafc58752b926c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef7e30ed571740f3b3ea6b24fc9c6e20', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 740.970697] env[68217]: DEBUG oslo_concurrency.lockutils [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.098734] env[68217]: DEBUG oslo_vmware.api [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961012, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.336606] env[68217]: DEBUG nova.compute.manager [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 741.396097] env[68217]: DEBUG oslo_vmware.api [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Task: {'id': task-2961014, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200006} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 741.396464] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 741.396529] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 741.396677] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 741.396841] env[68217]: INFO nova.compute.manager [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Took 1.11 seconds to destroy the instance on the hypervisor. [ 741.397102] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 741.397341] env[68217]: DEBUG nova.compute.manager [-] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 741.397390] env[68217]: DEBUG nova.network.neutron [-] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 741.411678] env[68217]: DEBUG nova.network.neutron [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Successfully created port: 9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 741.594777] env[68217]: DEBUG oslo_vmware.api [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961012, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 741.624340] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "38787c7d-a9cf-4ce6-a112-c1ec259697ca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.624582] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "38787c7d-a9cf-4ce6-a112-c1ec259697ca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.624778] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "38787c7d-a9cf-4ce6-a112-c1ec259697ca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.624967] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "38787c7d-a9cf-4ce6-a112-c1ec259697ca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 741.625133] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "38787c7d-a9cf-4ce6-a112-c1ec259697ca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 741.627879] env[68217]: INFO nova.compute.manager [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Terminating instance [ 741.779180] env[68217]: DEBUG nova.network.neutron [-] [instance: af11d05f-4432-4505-bb52-226414488960] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.806341] env[68217]: DEBUG nova.compute.manager [req-3e3a11d0-7498-472e-a178-3c9fe0fd01f9 req-c25e8a36-79e8-454d-9d02-84d9b2483b5a service nova] [instance: af11d05f-4432-4505-bb52-226414488960] Received event network-vif-deleted-199674eb-b628-4b78-a622-1e10863e5716 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 741.806341] env[68217]: INFO nova.compute.manager [req-3e3a11d0-7498-472e-a178-3c9fe0fd01f9 req-c25e8a36-79e8-454d-9d02-84d9b2483b5a service nova] [instance: af11d05f-4432-4505-bb52-226414488960] Neutron deleted interface 
199674eb-b628-4b78-a622-1e10863e5716; detaching it from the instance and deleting it from the info cache [ 741.806416] env[68217]: DEBUG nova.network.neutron [req-3e3a11d0-7498-472e-a178-3c9fe0fd01f9 req-c25e8a36-79e8-454d-9d02-84d9b2483b5a service nova] [instance: af11d05f-4432-4505-bb52-226414488960] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.856147] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59426d11-ce6a-4995-944a-71df01561509 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.869243] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ae4a04-2815-468a-ad39-5fc204674517 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.908713] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae3d7ec-35f9-4b76-a29c-66a774730c28 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.916854] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58530c7-d116-4159-92c5-ac3a999094a8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.934082] env[68217]: DEBUG nova.compute.provider_tree [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 741.974112] env[68217]: DEBUG oslo_concurrency.lockutils [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "7a01c2c5-3108-4382-85c5-a5ea5e6e160c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 741.974319] env[68217]: DEBUG oslo_concurrency.lockutils [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "7a01c2c5-3108-4382-85c5-a5ea5e6e160c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.095897] env[68217]: DEBUG oslo_vmware.api [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961012, 'name': CreateSnapshot_Task, 'duration_secs': 1.024155} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.096346] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 742.097434] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7ba4e3-b136-4b55-b081-ed1c011d9084 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.132921] env[68217]: DEBUG nova.compute.manager [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 742.132921] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 742.133701] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808daa6a-539b-4166-8ba2-3506a95a5bca {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.141737] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 742.142073] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-82b22675-559f-4346-a0ef-2d15c3a9b032 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.151104] env[68217]: DEBUG oslo_vmware.api [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 742.151104] env[68217]: value = "task-2961015" [ 742.151104] env[68217]: _type = "Task" [ 742.151104] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.158061] env[68217]: DEBUG oslo_vmware.api [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961015, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.287026] env[68217]: INFO nova.compute.manager [-] [instance: af11d05f-4432-4505-bb52-226414488960] Took 1.50 seconds to deallocate network for instance. 
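The CreateSnapshot_Task and PowerOffVM_Task records above ("Waiting for the task", "progress is 0%", "completed successfully") follow oslo.vmware's invoke-then-poll pattern: a *_Task SOAP method returns a task managed-object reference immediately, and the session then polls that task until it reaches an end state. A minimal sketch of that pattern, assuming placeholder vCenter credentials and an already-resolved VM reference (vm_ref is illustrative, not taken from this log):

# Sketch only: host, credentials and vm_ref are placeholders.
from oslo_vmware import api as vmware_api

def power_off(vm_ref):
    session = vmware_api.VMwareAPISession(
        host='vc.example.test',
        server_username='nova',
        server_password='secret',
        api_retry_count=3,
        task_poll_interval=0.5)
    # The *_Task method returns a Task moref right away...
    task_ref = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # ...and wait_for_task() polls it, emitting the "progress is N%" and
    # "completed successfully" DEBUG lines seen in this log until it finishes.
    return session.wait_for_task(task_ref)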
[ 742.312451] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f0618d50-7e63-4b11-8f69-fc8868a3b9b5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.322335] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90aeaef4-8fb7-42d7-a6a4-58d7526da297 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.354335] env[68217]: DEBUG nova.compute.manager [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 742.356515] env[68217]: DEBUG nova.compute.manager [req-3e3a11d0-7498-472e-a178-3c9fe0fd01f9 req-c25e8a36-79e8-454d-9d02-84d9b2483b5a service nova] [instance: af11d05f-4432-4505-bb52-226414488960] Detach interface failed, port_id=199674eb-b628-4b78-a622-1e10863e5716, reason: Instance af11d05f-4432-4505-bb52-226414488960 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 742.375965] env[68217]: DEBUG nova.virt.hardware [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 742.376265] env[68217]: DEBUG nova.virt.hardware [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 742.376462] env[68217]: DEBUG nova.virt.hardware [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 742.376675] env[68217]: DEBUG nova.virt.hardware [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 742.376851] env[68217]: DEBUG nova.virt.hardware [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 
tempest-AttachInterfacesTestJSON-2077443799-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 742.377034] env[68217]: DEBUG nova.virt.hardware [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 742.377259] env[68217]: DEBUG nova.virt.hardware [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 742.377417] env[68217]: DEBUG nova.virt.hardware [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 742.377578] env[68217]: DEBUG nova.virt.hardware [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 742.377831] env[68217]: DEBUG nova.virt.hardware [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 742.377892] env[68217]: DEBUG nova.virt.hardware [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 742.378774] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9730321a-4058-4bb3-b65f-4d3f0ecd5df4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.386361] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396a60a5-6e78-4994-8ef1-8f4634e703fc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.412715] env[68217]: DEBUG nova.network.neutron [-] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.455063] env[68217]: ERROR nova.scheduler.client.report [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [req-c9360676-395f-4b88-a1c5-58b2efd4869d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c9360676-395f-4b88-a1c5-58b2efd4869d"}]} [ 742.472462] env[68217]: DEBUG nova.scheduler.client.report [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 742.487471] env[68217]: DEBUG nova.scheduler.client.report [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 742.487675] env[68217]: DEBUG nova.compute.provider_tree [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 742.500524] env[68217]: DEBUG nova.scheduler.client.report [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 742.523646] env[68217]: DEBUG nova.scheduler.client.report [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 742.618159] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 
tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 742.621133] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-bbf34598-5deb-4f4c-8634-654769e5fa65 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.629654] env[68217]: DEBUG oslo_vmware.api [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 742.629654] env[68217]: value = "task-2961016" [ 742.629654] env[68217]: _type = "Task" [ 742.629654] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.640030] env[68217]: DEBUG oslo_vmware.api [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961016, 'name': CloneVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.660354] env[68217]: DEBUG oslo_vmware.api [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961015, 'name': PowerOffVM_Task, 'duration_secs': 0.209271} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 742.660618] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 742.660808] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 742.661119] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b963a216-8b27-4f45-b867-6d9d6815b15f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.742910] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 742.743486] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 742.743987] env[68217]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleting the datastore file [datastore1] 38787c7d-a9cf-4ce6-a112-c1ec259697ca {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 742.744120] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf695094-bca0-4cc4-9fee-975d43d4621a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.755353] env[68217]: DEBUG oslo_vmware.api [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 742.755353] env[68217]: value = "task-2961018" [ 742.755353] env[68217]: _type = "Task" [ 742.755353] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 742.765164] env[68217]: DEBUG oslo_vmware.api [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961018, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 742.793312] env[68217]: DEBUG oslo_concurrency.lockutils [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.819821] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "ba39e563-3e3a-40aa-815f-760f0f37a55d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 742.820249] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "ba39e563-3e3a-40aa-815f-760f0f37a55d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 742.916078] env[68217]: INFO nova.compute.manager [-] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Took 1.52 seconds to deallocate network for instance. 
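The lock records in this stretch ("Acquiring lock ... by nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance", "acquired ... waited 0.000s", '"released" ... held ...s') are produced by oslo.concurrency's synchronized decorator, which nova wraps around per-instance critical sections such as build and terminate. A minimal sketch of that pattern, assuming a plain in-process lock; the nested-function layout mirrors the qualified names printed in these records, and the UUID is only an example taken from the lock name above:

from oslo_concurrency import lockutils

# nova builds its decorator with a 'nova-' prefix for external lock files;
# the logged lock name is still the bare name passed in (here, an instance UUID).
synchronized = lockutils.synchronized_with_prefix('nova-')

def build_and_run_instance(instance_uuid):
    @synchronized(instance_uuid)
    def _locked_do_build_and_run_instance():
        # Runs with the instance-scoped lock held; lockutils logs the wrapped
        # function's qualified name plus the wait and hold durations.
        pass
    _locked_do_build_and_run_instance()

build_and_run_instance('7a01c2c5-3108-4382-85c5-a5ea5e6e160c')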
[ 742.979665] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-214d131d-78e6-46c6-91f0-92ccaa995bb1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.988018] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4991c51c-ee29-463b-9630-ba8b97c59ad2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.022560] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-776b0c89-6620-42d5-8f6f-bdce854a23cc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.030911] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eefcfd8-83e7-43d0-b176-45f2ea818eec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.045131] env[68217]: DEBUG nova.compute.provider_tree [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 743.139880] env[68217]: DEBUG oslo_vmware.api [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961016, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.265159] env[68217]: DEBUG oslo_vmware.api [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961018, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.233271} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 743.265159] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 743.265370] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 743.265486] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 743.265658] env[68217]: INFO nova.compute.manager [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Took 1.13 seconds to destroy the instance on the hypervisor. [ 743.265897] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 743.266106] env[68217]: DEBUG nova.compute.manager [-] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 743.266200] env[68217]: DEBUG nova.network.neutron [-] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 743.372838] env[68217]: DEBUG nova.compute.manager [req-ae5aad5d-6aae-4bf2-bc21-d2956f6457a7 req-7f69987e-ecac-47f4-a854-545fc90f05b8 service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Received event network-vif-plugged-9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 743.373119] env[68217]: DEBUG oslo_concurrency.lockutils [req-ae5aad5d-6aae-4bf2-bc21-d2956f6457a7 req-7f69987e-ecac-47f4-a854-545fc90f05b8 service nova] Acquiring lock "580e6909-7d05-447a-a378-f0b8b71f059a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.373292] env[68217]: DEBUG oslo_concurrency.lockutils [req-ae5aad5d-6aae-4bf2-bc21-d2956f6457a7 req-7f69987e-ecac-47f4-a854-545fc90f05b8 service nova] Lock "580e6909-7d05-447a-a378-f0b8b71f059a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 743.373456] env[68217]: DEBUG oslo_concurrency.lockutils [req-ae5aad5d-6aae-4bf2-bc21-d2956f6457a7 req-7f69987e-ecac-47f4-a854-545fc90f05b8 service nova] Lock "580e6909-7d05-447a-a378-f0b8b71f059a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 743.373630] env[68217]: DEBUG nova.compute.manager [req-ae5aad5d-6aae-4bf2-bc21-d2956f6457a7 req-7f69987e-ecac-47f4-a854-545fc90f05b8 service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] No waiting events found dispatching network-vif-plugged-9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 743.373798] env[68217]: WARNING nova.compute.manager [req-ae5aad5d-6aae-4bf2-bc21-d2956f6457a7 req-7f69987e-ecac-47f4-a854-545fc90f05b8 service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Received unexpected event network-vif-plugged-9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2 for instance with vm_state building and task_state spawning. 
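The 409 from Placement a few records back ("resource provider generation conflict", code placement.concurrent_update) and the refresh plus successful update with a generation bump just below illustrate Placement's optimistic concurrency: every inventory write must echo the provider generation the writer last read, and a stale generation is rejected so the caller re-reads and retries. A minimal sketch of that exchange against the HTTP API, assuming a placeholder endpoint, token and microversion (nova's real client lives in nova.scheduler.client.report, not this snippet):

import requests

PLACEMENT = 'http://placement.example.test'            # placeholder endpoint
HEADERS = {'X-Auth-Token': 'gAAAA...placeholder',      # placeholder token
           'OpenStack-API-Version': 'placement 1.39'}  # assumed microversion

def set_inventories(rp_uuid, inventories):
    url = f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories'
    while True:
        # Read the provider's current generation...
        generation = requests.get(url, headers=HEADERS).json()[
            'resource_provider_generation']
        # ...and echo it back; Placement bumps the generation on success.
        resp = requests.put(url, headers=HEADERS, json={
            'resource_provider_generation': generation,
            'inventories': inventories,
        })
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 placement.concurrent_update: another writer got there first;
        # refresh the generation and retry, as the report client does above.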
[ 743.425902] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 743.456674] env[68217]: DEBUG nova.network.neutron [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Successfully updated port: 9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 743.588411] env[68217]: DEBUG nova.scheduler.client.report [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 68 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 743.588672] env[68217]: DEBUG nova.compute.provider_tree [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 68 to 69 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 743.588886] env[68217]: DEBUG nova.compute.provider_tree [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 743.641574] env[68217]: DEBUG oslo_vmware.api [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961016, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 743.900691] env[68217]: DEBUG nova.compute.manager [req-6a4f8b6e-13a2-4389-bea1-8f8cc8eeefd8 req-07f1a149-9455-4518-9400-99a5bebce2d8 service nova] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Received event network-vif-deleted-e1840648-4f87-4974-bc63-bd2b25acab29 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 743.900691] env[68217]: DEBUG nova.compute.manager [req-6a4f8b6e-13a2-4389-bea1-8f8cc8eeefd8 req-07f1a149-9455-4518-9400-99a5bebce2d8 service nova] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Received event network-vif-deleted-c390985b-4383-44b2-881a-57ed086930c0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 743.900691] env[68217]: INFO nova.compute.manager [req-6a4f8b6e-13a2-4389-bea1-8f8cc8eeefd8 req-07f1a149-9455-4518-9400-99a5bebce2d8 service nova] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Neutron deleted interface c390985b-4383-44b2-881a-57ed086930c0; detaching it from the instance and deleting it from the info cache [ 743.900691] env[68217]: DEBUG nova.network.neutron [req-6a4f8b6e-13a2-4389-bea1-8f8cc8eeefd8 req-07f1a149-9455-4518-9400-99a5bebce2d8 service nova] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.959617] env[68217]: DEBUG oslo_concurrency.lockutils [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "refresh_cache-580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 743.959783] env[68217]: DEBUG oslo_concurrency.lockutils [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "refresh_cache-580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 743.959937] env[68217]: DEBUG nova.network.neutron [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 744.046201] env[68217]: DEBUG nova.network.neutron [-] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.094366] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.771s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 744.095405] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 49.692s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.096966] env[68217]: INFO nova.compute.claims [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 744.142385] env[68217]: DEBUG oslo_vmware.api [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961016, 'name': CloneVM_Task, 'duration_secs': 1.311455} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 744.142518] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Created linked-clone VM from snapshot [ 744.143851] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a94bd42e-97f4-4692-ae52-66f168c8269b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.152333] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Uploading image d6b75fbd-7007-4a04-95fd-a752f2dc1cda {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 744.166124] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 744.166419] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-fa07e23e-e6e8-43ce-acdb-3508b56c20af {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.174279] env[68217]: DEBUG oslo_vmware.api [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 744.174279] env[68217]: value = "task-2961019" [ 744.174279] env[68217]: _type = "Task" [ 744.174279] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 744.182434] env[68217]: DEBUG oslo_vmware.api [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961019, 'name': Destroy_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 744.403598] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-816b819b-1c3b-48f8-b484-de9b1b8a7d0f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.413071] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c11e5f-2ebb-4cb2-b9fb-5c2d50a2f3eb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 744.443165] env[68217]: DEBUG nova.compute.manager [req-6a4f8b6e-13a2-4389-bea1-8f8cc8eeefd8 req-07f1a149-9455-4518-9400-99a5bebce2d8 service nova] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Detach interface failed, port_id=c390985b-4383-44b2-881a-57ed086930c0, reason: Instance 38787c7d-a9cf-4ce6-a112-c1ec259697ca could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 744.494234] env[68217]: DEBUG nova.network.neutron [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 744.548931] env[68217]: INFO nova.compute.manager [-] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Took 1.28 seconds to deallocate network for instance. [ 744.602572] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Acquiring lock "416cbaab-e4f4-4c43-b256-b5534da7a8c3" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 744.602897] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Lock "416cbaab-e4f4-4c43-b256-b5534da7a8c3" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 744.665022] env[68217]: DEBUG nova.network.neutron [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Updating instance_info_cache with network_info: [{"id": "9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2", "address": "fa:16:3e:cd:e0:cd", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": 
{"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e834f9f-3d", "ovs_interfaceid": "9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.688179] env[68217]: DEBUG oslo_vmware.api [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961019, 'name': Destroy_Task} progress is 33%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.057400] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 745.108135] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Lock "416cbaab-e4f4-4c43-b256-b5534da7a8c3" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.505s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 745.108703] env[68217]: DEBUG nova.compute.manager [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 745.167908] env[68217]: DEBUG oslo_concurrency.lockutils [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "refresh_cache-580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 745.168274] env[68217]: DEBUG nova.compute.manager [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Instance network_info: |[{"id": "9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2", "address": "fa:16:3e:cd:e0:cd", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e834f9f-3d", "ovs_interfaceid": "9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 745.168722] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cd:e0:cd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78340140-126f-4ef8-a340-debaa64da3e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 745.176049] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Creating folder: Project (ef7e30ed571740f3b3ea6b24fc9c6e20). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 745.178588] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b0f3c46-0519-4090-8ddb-73913e77914c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.190442] env[68217]: DEBUG oslo_vmware.api [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961019, 'name': Destroy_Task, 'duration_secs': 0.829503} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.192830] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Destroyed the VM [ 745.193080] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 745.194406] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-682dc4eb-591f-4a85-bcab-b1658d982f6a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.195882] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Created folder: Project (ef7e30ed571740f3b3ea6b24fc9c6e20) in parent group-v594094. [ 745.196075] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Creating folder: Instances. Parent ref: group-v594218. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 745.196286] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3d93515-d8c0-4df7-aa43-0c733f482a4a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.202920] env[68217]: DEBUG oslo_vmware.api [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 745.202920] env[68217]: value = "task-2961021" [ 745.202920] env[68217]: _type = "Task" [ 745.202920] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.209114] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Created folder: Instances in parent group-v594218. 
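The folder records above ("Creating folder: Project (ef7e30ed571740f3b3ea6b24fc9c6e20)", "Creating folder: Instances", "Created folder: Instances in parent group-v594218") show the vmwareapi driver laying out its per-project folder hierarchy before the CreateVM_Task that follows. A minimal sketch of the underlying Folder.CreateFolder call, assuming an established oslo.vmware session and a parent Folder managed-object reference (both placeholders here):

from oslo_vmware import exceptions as vexc

def create_child_folder(session, parent_ref, name):
    try:
        # Folder.CreateFolder returns the new child's moref
        # (e.g. the group-v594218 "Instances" folder above).
        return session.invoke_api(session.vim, 'CreateFolder',
                                  parent_ref, name=name)
    except vexc.DuplicateName:
        # A concurrent request created it first; callers typically look the
        # existing child up by name and reuse it instead of failing.
        return None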
[ 745.209393] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 745.209865] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 745.210261] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8bb1a486-e51b-4e0b-b283-f37f93d2ba5c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.229678] env[68217]: DEBUG oslo_vmware.api [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961021, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.234993] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 745.234993] env[68217]: value = "task-2961023" [ 745.234993] env[68217]: _type = "Task" [ 745.234993] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.244294] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961023, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.400300] env[68217]: DEBUG nova.compute.manager [req-b98897a1-ae70-4774-8d8d-4c93ec8ab5ab req-65cb9625-2da7-406d-a1de-f7d6b8d4ceb3 service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Received event network-changed-9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 745.400439] env[68217]: DEBUG nova.compute.manager [req-b98897a1-ae70-4774-8d8d-4c93ec8ab5ab req-65cb9625-2da7-406d-a1de-f7d6b8d4ceb3 service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Refreshing instance network info cache due to event network-changed-9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 745.400610] env[68217]: DEBUG oslo_concurrency.lockutils [req-b98897a1-ae70-4774-8d8d-4c93ec8ab5ab req-65cb9625-2da7-406d-a1de-f7d6b8d4ceb3 service nova] Acquiring lock "refresh_cache-580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.400850] env[68217]: DEBUG oslo_concurrency.lockutils [req-b98897a1-ae70-4774-8d8d-4c93ec8ab5ab req-65cb9625-2da7-406d-a1de-f7d6b8d4ceb3 service nova] Acquired lock "refresh_cache-580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.400976] env[68217]: DEBUG nova.network.neutron [req-b98897a1-ae70-4774-8d8d-4c93ec8ab5ab req-65cb9625-2da7-406d-a1de-f7d6b8d4ceb3 service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Refreshing network info cache for port 9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 745.557584] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9522237-598d-4f36-b983-2cb0671a176e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.565955] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbee33d7-d95b-47ad-b0ed-c93c47f73928 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.598940] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf53ea60-d8cf-4787-a8a5-f57b138a5767 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.606958] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-813c81de-c319-4dba-a3c6-9ec4f4c964b2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.622177] env[68217]: DEBUG nova.compute.utils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 745.624128] env[68217]: DEBUG nova.compute.provider_tree [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.625007] env[68217]: DEBUG nova.compute.manager [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 745.625210] env[68217]: DEBUG nova.network.neutron [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 745.662555] env[68217]: DEBUG nova.policy [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f0bbbf3285964377b51f684f330f6fa8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0e8c4e14c1fc4f9998c2af7a7169a1e7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 745.715371] env[68217]: DEBUG oslo_vmware.api [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961021, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.744246] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961023, 'name': CreateVM_Task, 'duration_secs': 0.414432} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 745.744428] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 745.745142] env[68217]: DEBUG oslo_concurrency.lockutils [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 745.745301] env[68217]: DEBUG oslo_concurrency.lockutils [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 745.745631] env[68217]: DEBUG oslo_concurrency.lockutils [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 745.745888] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a2609a5-ddec-46c5-a7e2-b3b2b6870763 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.751032] 
env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 745.751032] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ea9f44-3bd5-9bc2-442f-adf70086bbdf" [ 745.751032] env[68217]: _type = "Task" [ 745.751032] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 745.761130] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ea9f44-3bd5-9bc2-442f-adf70086bbdf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 745.952764] env[68217]: DEBUG nova.network.neutron [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Successfully created port: d0d0d745-839e-4300-96dc-96e3be561179 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 746.125853] env[68217]: DEBUG nova.compute.manager [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 746.131101] env[68217]: DEBUG nova.scheduler.client.report [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 746.171450] env[68217]: DEBUG nova.network.neutron [req-b98897a1-ae70-4774-8d8d-4c93ec8ab5ab req-65cb9625-2da7-406d-a1de-f7d6b8d4ceb3 service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Updated VIF entry in instance network info cache for port 9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 746.171801] env[68217]: DEBUG nova.network.neutron [req-b98897a1-ae70-4774-8d8d-4c93ec8ab5ab req-65cb9625-2da7-406d-a1de-f7d6b8d4ceb3 service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Updating instance_info_cache with network_info: [{"id": "9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2", "address": "fa:16:3e:cd:e0:cd", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e834f9f-3d", "ovs_interfaceid": "9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.215150] env[68217]: DEBUG oslo_vmware.api [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961021, 'name': RemoveSnapshot_Task, 'duration_secs': 0.747317} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.215521] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 746.260067] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ea9f44-3bd5-9bc2-442f-adf70086bbdf, 'name': SearchDatastore_Task, 'duration_secs': 0.013041} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.260717] env[68217]: DEBUG oslo_concurrency.lockutils [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.260717] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 746.260926] env[68217]: DEBUG oslo_concurrency.lockutils [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.261161] env[68217]: DEBUG oslo_concurrency.lockutils [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 746.261391] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 746.261677] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7c5494f0-f431-4ce9-8432-02ffd3e916a9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.270015] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 746.270232] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 746.270975] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31fffb3d-a40f-475e-9828-3e4ee92b4db4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.275985] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 746.275985] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52abe447-11a9-bacb-fdec-17c21e26c0cf" [ 746.275985] env[68217]: _type = "Task" [ 746.275985] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.282943] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52abe447-11a9-bacb-fdec-17c21e26c0cf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 746.638043] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.543s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 746.638584] env[68217]: DEBUG nova.compute.manager [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 746.647019] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 47.943s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 746.647019] env[68217]: INFO nova.compute.claims [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 746.673694] env[68217]: DEBUG oslo_concurrency.lockutils [req-b98897a1-ae70-4774-8d8d-4c93ec8ab5ab req-65cb9625-2da7-406d-a1de-f7d6b8d4ceb3 service nova] Releasing lock "refresh_cache-580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 746.720979] env[68217]: WARNING nova.compute.manager [None req-49d65b8c-020e-46f7-b0eb-7c064e8e56da tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Image not found during snapshot: nova.exception.ImageNotFound: Image d6b75fbd-7007-4a04-95fd-a752f2dc1cda could not be found. [ 746.786083] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52abe447-11a9-bacb-fdec-17c21e26c0cf, 'name': SearchDatastore_Task, 'duration_secs': 0.021142} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 746.786868] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-426a2854-0111-40c6-985d-e6d968cbe11d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.794958] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 746.794958] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d113d9-7391-3edf-db7f-0ecca74a4fcb" [ 746.794958] env[68217]: _type = "Task" [ 746.794958] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 746.806339] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d113d9-7391-3edf-db7f-0ecca74a4fcb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.146729] env[68217]: DEBUG nova.compute.utils [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 747.148197] env[68217]: DEBUG nova.compute.manager [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 747.148412] env[68217]: DEBUG nova.network.neutron [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 747.152715] env[68217]: DEBUG nova.compute.manager [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 747.177778] env[68217]: DEBUG nova.virt.hardware [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 747.178032] env[68217]: DEBUG nova.virt.hardware [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 747.178193] env[68217]: DEBUG nova.virt.hardware [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 747.178373] env[68217]: DEBUG nova.virt.hardware [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 747.178538] env[68217]: DEBUG nova.virt.hardware [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b 
tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 747.178692] env[68217]: DEBUG nova.virt.hardware [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 747.178903] env[68217]: DEBUG nova.virt.hardware [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 747.179133] env[68217]: DEBUG nova.virt.hardware [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 747.179310] env[68217]: DEBUG nova.virt.hardware [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 747.179474] env[68217]: DEBUG nova.virt.hardware [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 747.179646] env[68217]: DEBUG nova.virt.hardware [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 747.180785] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d49aa0-e3c9-4be8-be94-e7b708ac3149 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.189436] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3860a5e-62d2-4f97-bac7-7a74f03f17fb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.218079] env[68217]: DEBUG nova.policy [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3cbeeb90f26e4395989c1f71f5efec17', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3fcea47a290440bcb11f3f962f8e6de5', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} 
[ 747.296252] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "3d03e0b7-0469-4041-a7d5-7768326eb3b5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.296730] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "3d03e0b7-0469-4041-a7d5-7768326eb3b5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.296979] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "3d03e0b7-0469-4041-a7d5-7768326eb3b5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.297188] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "3d03e0b7-0469-4041-a7d5-7768326eb3b5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.297355] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "3d03e0b7-0469-4041-a7d5-7768326eb3b5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.302639] env[68217]: INFO nova.compute.manager [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Terminating instance [ 747.316670] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d113d9-7391-3edf-db7f-0ecca74a4fcb, 'name': SearchDatastore_Task, 'duration_secs': 0.012624} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.317425] env[68217]: DEBUG oslo_concurrency.lockutils [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 747.317691] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 580e6909-7d05-447a-a378-f0b8b71f059a/580e6909-7d05-447a-a378-f0b8b71f059a.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 747.317940] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8e1b54c9-141c-4054-8be8-c422165aeb9f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.324592] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 747.324592] env[68217]: value = "task-2961024" [ 747.324592] env[68217]: _type = "Task" [ 747.324592] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.332994] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961024, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.336010] env[68217]: DEBUG nova.compute.manager [req-3dfdab04-8e7d-416e-8253-50d8bcb58930 req-2cb20089-4e0e-483e-9dd7-9b7270a4b9be service nova] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Received event network-vif-plugged-d0d0d745-839e-4300-96dc-96e3be561179 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 747.336304] env[68217]: DEBUG oslo_concurrency.lockutils [req-3dfdab04-8e7d-416e-8253-50d8bcb58930 req-2cb20089-4e0e-483e-9dd7-9b7270a4b9be service nova] Acquiring lock "71243775-e8df-4cc5-85c9-d64a244b4426-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 747.336407] env[68217]: DEBUG oslo_concurrency.lockutils [req-3dfdab04-8e7d-416e-8253-50d8bcb58930 req-2cb20089-4e0e-483e-9dd7-9b7270a4b9be service nova] Lock "71243775-e8df-4cc5-85c9-d64a244b4426-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 747.336572] env[68217]: DEBUG oslo_concurrency.lockutils [req-3dfdab04-8e7d-416e-8253-50d8bcb58930 req-2cb20089-4e0e-483e-9dd7-9b7270a4b9be service nova] Lock "71243775-e8df-4cc5-85c9-d64a244b4426-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 747.336755] env[68217]: DEBUG nova.compute.manager [req-3dfdab04-8e7d-416e-8253-50d8bcb58930 req-2cb20089-4e0e-483e-9dd7-9b7270a4b9be service nova] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] No waiting events found dispatching network-vif-plugged-d0d0d745-839e-4300-96dc-96e3be561179 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 747.336922] env[68217]: WARNING nova.compute.manager [req-3dfdab04-8e7d-416e-8253-50d8bcb58930 req-2cb20089-4e0e-483e-9dd7-9b7270a4b9be service nova] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Received unexpected event network-vif-plugged-d0d0d745-839e-4300-96dc-96e3be561179 for instance with vm_state building and task_state spawning. [ 747.422718] env[68217]: DEBUG nova.network.neutron [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Successfully updated port: d0d0d745-839e-4300-96dc-96e3be561179 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 747.613153] env[68217]: DEBUG nova.network.neutron [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Successfully created port: bde1de37-ba7a-4f49-94b6-85acc11e39a6 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 747.659123] env[68217]: DEBUG nova.compute.manager [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 747.812023] env[68217]: DEBUG nova.compute.manager [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 747.812258] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 747.813554] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63095991-286e-40b2-b511-3b4aee38d88e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.820787] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 747.821137] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9b370c55-4b67-44a3-a972-66571b4de5d8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.829634] env[68217]: DEBUG oslo_vmware.api [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 747.829634] env[68217]: value = "task-2961025" [ 747.829634] env[68217]: _type = "Task" [ 747.829634] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.837698] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961024, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.490055} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.840537] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 580e6909-7d05-447a-a378-f0b8b71f059a/580e6909-7d05-447a-a378-f0b8b71f059a.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 747.840772] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 747.841990] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cf2db651-2cb8-4a53-9e3b-b311e389f057 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.845538] env[68217]: DEBUG oslo_vmware.api [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961025, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.850551] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 747.850551] env[68217]: value = "task-2961026" [ 747.850551] env[68217]: _type = "Task" [ 747.850551] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.861375] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961026, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.926597] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Acquiring lock "refresh_cache-71243775-e8df-4cc5-85c9-d64a244b4426" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.926740] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Acquired lock "refresh_cache-71243775-e8df-4cc5-85c9-d64a244b4426" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 747.928575] env[68217]: DEBUG nova.network.neutron [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 748.154993] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2894630-c9bb-49a6-9fd0-2af482c28bff {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.163618] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8122475a-2d78-454c-8f3c-062fddd156f0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.198181] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19b9a7b-ce8c-4ef3-95ac-efc48ed02dd6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.207225] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8362febe-28d6-41d7-a2e1-a814ee8b0d77 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.222253] env[68217]: DEBUG nova.compute.provider_tree [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 748.339047] env[68217]: DEBUG oslo_vmware.api [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961025, 'name': PowerOffVM_Task, 'duration_secs': 0.173295} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.339461] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 748.339501] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 748.339714] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-598b291a-0706-4284-956f-b58b39cd2dd1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.360916] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961026, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073284} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.365019] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 748.365019] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d1bf063-9f26-4f3a-85c7-23127a726dae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.388157] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Reconfiguring VM instance instance-00000028 to attach disk [datastore2] 580e6909-7d05-447a-a378-f0b8b71f059a/580e6909-7d05-447a-a378-f0b8b71f059a.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 748.388582] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a89f7d4d-2714-4267-b7fd-28a072183f17 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.403758] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 748.403889] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 
tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 748.404141] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Deleting the datastore file [datastore1] 3d03e0b7-0469-4041-a7d5-7768326eb3b5 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 748.404827] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-df0ed212-53ae-43cc-9430-3f6529f72a5d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.409097] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 748.409097] env[68217]: value = "task-2961028" [ 748.409097] env[68217]: _type = "Task" [ 748.409097] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.413152] env[68217]: DEBUG oslo_vmware.api [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 748.413152] env[68217]: value = "task-2961029" [ 748.413152] env[68217]: _type = "Task" [ 748.413152] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.419817] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961028, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.424736] env[68217]: DEBUG oslo_vmware.api [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961029, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.462456] env[68217]: DEBUG nova.network.neutron [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 748.591656] env[68217]: DEBUG nova.network.neutron [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Updating instance_info_cache with network_info: [{"id": "d0d0d745-839e-4300-96dc-96e3be561179", "address": "fa:16:3e:aa:b2:f1", "network": {"id": "6ae903e7-df2e-45a9-9ce4-f7ff542dfc17", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1117114126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e8c4e14c1fc4f9998c2af7a7169a1e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0d0d745-83", "ovs_interfaceid": "d0d0d745-839e-4300-96dc-96e3be561179", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.672190] env[68217]: DEBUG nova.compute.manager [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 748.699483] env[68217]: DEBUG nova.virt.hardware [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 748.699483] env[68217]: DEBUG nova.virt.hardware [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 748.699483] env[68217]: DEBUG nova.virt.hardware [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 748.699483] env[68217]: DEBUG nova.virt.hardware [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 748.699796] env[68217]: DEBUG nova.virt.hardware [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 748.699796] env[68217]: DEBUG nova.virt.hardware [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 748.699796] env[68217]: DEBUG nova.virt.hardware [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 748.699796] env[68217]: DEBUG nova.virt.hardware [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 748.699796] env[68217]: DEBUG nova.virt.hardware [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] 
Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 748.699939] env[68217]: DEBUG nova.virt.hardware [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 748.699939] env[68217]: DEBUG nova.virt.hardware [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 748.701040] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23d6d2c6-8918-4ad9-98a7-1fc02177b0c6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.708720] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b1b3c8-7391-4b50-922c-91fc315f7e9a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.725622] env[68217]: DEBUG nova.scheduler.client.report [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 748.920053] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961028, 'name': ReconfigVM_Task, 'duration_secs': 0.287502} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.920739] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Reconfigured VM instance instance-00000028 to attach disk [datastore2] 580e6909-7d05-447a-a378-f0b8b71f059a/580e6909-7d05-447a-a378-f0b8b71f059a.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 748.921451] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-55c2b3cb-469b-4e75-9fc3-f9b03120036a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.925751] env[68217]: DEBUG oslo_vmware.api [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961029, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.150854} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.926347] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 748.926533] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 748.926721] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 748.926894] env[68217]: INFO nova.compute.manager [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Took 1.11 seconds to destroy the instance on the hypervisor. [ 748.927169] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 748.927519] env[68217]: DEBUG nova.compute.manager [-] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 748.927519] env[68217]: DEBUG nova.network.neutron [-] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 748.930766] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 748.930766] env[68217]: value = "task-2961030" [ 748.930766] env[68217]: _type = "Task" [ 748.930766] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.939881] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961030, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.095018] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Releasing lock "refresh_cache-71243775-e8df-4cc5-85c9-d64a244b4426" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 749.095543] env[68217]: DEBUG nova.compute.manager [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Instance network_info: |[{"id": "d0d0d745-839e-4300-96dc-96e3be561179", "address": "fa:16:3e:aa:b2:f1", "network": {"id": "6ae903e7-df2e-45a9-9ce4-f7ff542dfc17", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1117114126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e8c4e14c1fc4f9998c2af7a7169a1e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0d0d745-83", "ovs_interfaceid": "d0d0d745-839e-4300-96dc-96e3be561179", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 749.096032] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Instance VIF info 
[{'network_name': 'br-int', 'mac_address': 'fa:16:3e:aa:b2:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'cd098b1c-636f-492d-b5ae-037cb0cae454', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd0d0d745-839e-4300-96dc-96e3be561179', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 749.103555] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Creating folder: Project (0e8c4e14c1fc4f9998c2af7a7169a1e7). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 749.103848] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e17dd36-fc2a-40b5-bffd-f7b32e3f9f3c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.114496] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Created folder: Project (0e8c4e14c1fc4f9998c2af7a7169a1e7) in parent group-v594094. [ 749.115570] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Creating folder: Instances. Parent ref: group-v594221. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 749.115570] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-47b3a210-e9d8-4be6-bc3a-29b3f044bc6b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.125229] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Created folder: Instances in parent group-v594221. [ 749.125466] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 749.125661] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 749.125864] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb0a8881-2791-4acf-95ac-60281b73f4b0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.145382] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 749.145382] env[68217]: value = "task-2961033" [ 749.145382] env[68217]: _type = "Task" [ 749.145382] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.153098] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961033, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.230815] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.586s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.231379] env[68217]: DEBUG nova.compute.manager [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 749.234020] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 50.263s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.234231] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.241149] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 47.319s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.241352] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.243871] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 46.951s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.244071] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.245765] 
env[68217]: DEBUG oslo_concurrency.lockutils [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 46.725s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.248181] env[68217]: INFO nova.compute.claims [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 749.281467] env[68217]: INFO nova.scheduler.client.report [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Deleted allocations for instance 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6 [ 749.287661] env[68217]: INFO nova.scheduler.client.report [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Deleted allocations for instance f748cf37-6605-49a2-a418-51667a0fac4a [ 749.309289] env[68217]: INFO nova.scheduler.client.report [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Deleted allocations for instance a4a88f10-937a-4fa6-aa15-eb7f669e77d0 [ 749.395158] env[68217]: DEBUG nova.compute.manager [req-2f212da6-f4c9-4e08-a4ed-b5faddfa9b69 req-c5a1830c-09fe-4713-8b01-2e621739be83 service nova] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Received event network-vif-plugged-bde1de37-ba7a-4f49-94b6-85acc11e39a6 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 749.395379] env[68217]: DEBUG oslo_concurrency.lockutils [req-2f212da6-f4c9-4e08-a4ed-b5faddfa9b69 req-c5a1830c-09fe-4713-8b01-2e621739be83 service nova] Acquiring lock "d0d8ed27-003e-43e2-8a07-041420a2c758-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 749.395615] env[68217]: DEBUG oslo_concurrency.lockutils [req-2f212da6-f4c9-4e08-a4ed-b5faddfa9b69 req-c5a1830c-09fe-4713-8b01-2e621739be83 service nova] Lock "d0d8ed27-003e-43e2-8a07-041420a2c758-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 749.395807] env[68217]: DEBUG oslo_concurrency.lockutils [req-2f212da6-f4c9-4e08-a4ed-b5faddfa9b69 req-c5a1830c-09fe-4713-8b01-2e621739be83 service nova] Lock "d0d8ed27-003e-43e2-8a07-041420a2c758-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.396399] env[68217]: DEBUG nova.compute.manager [req-2f212da6-f4c9-4e08-a4ed-b5faddfa9b69 req-c5a1830c-09fe-4713-8b01-2e621739be83 service nova] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] No waiting events found dispatching network-vif-plugged-bde1de37-ba7a-4f49-94b6-85acc11e39a6 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 749.396652] 
env[68217]: WARNING nova.compute.manager [req-2f212da6-f4c9-4e08-a4ed-b5faddfa9b69 req-c5a1830c-09fe-4713-8b01-2e621739be83 service nova] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Received unexpected event network-vif-plugged-bde1de37-ba7a-4f49-94b6-85acc11e39a6 for instance with vm_state building and task_state spawning. [ 749.442631] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961030, 'name': Rename_Task, 'duration_secs': 0.142002} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.442631] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 749.442631] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-197906d3-e6e0-4739-8116-752bab306573 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.450022] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 749.450022] env[68217]: value = "task-2961034" [ 749.450022] env[68217]: _type = "Task" [ 749.450022] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.465463] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961034, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.492037] env[68217]: DEBUG nova.compute.manager [req-e6fec17d-2aaa-4d1c-91a1-456298496225 req-4c8a6b31-5354-40c3-a358-0fedf636fd45 service nova] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Received event network-changed-d0d0d745-839e-4300-96dc-96e3be561179 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 749.492037] env[68217]: DEBUG nova.compute.manager [req-e6fec17d-2aaa-4d1c-91a1-456298496225 req-4c8a6b31-5354-40c3-a358-0fedf636fd45 service nova] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Refreshing instance network info cache due to event network-changed-d0d0d745-839e-4300-96dc-96e3be561179. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 749.495060] env[68217]: DEBUG oslo_concurrency.lockutils [req-e6fec17d-2aaa-4d1c-91a1-456298496225 req-4c8a6b31-5354-40c3-a358-0fedf636fd45 service nova] Acquiring lock "refresh_cache-71243775-e8df-4cc5-85c9-d64a244b4426" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.495060] env[68217]: DEBUG oslo_concurrency.lockutils [req-e6fec17d-2aaa-4d1c-91a1-456298496225 req-4c8a6b31-5354-40c3-a358-0fedf636fd45 service nova] Acquired lock "refresh_cache-71243775-e8df-4cc5-85c9-d64a244b4426" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.495060] env[68217]: DEBUG nova.network.neutron [req-e6fec17d-2aaa-4d1c-91a1-456298496225 req-4c8a6b31-5354-40c3-a358-0fedf636fd45 service nova] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Refreshing network info cache for port d0d0d745-839e-4300-96dc-96e3be561179 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 749.505465] env[68217]: DEBUG nova.network.neutron [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Successfully updated port: bde1de37-ba7a-4f49-94b6-85acc11e39a6 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 749.655954] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961033, 'name': CreateVM_Task, 'duration_secs': 0.376669} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.656372] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 749.656753] env[68217]: DEBUG nova.network.neutron [-] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.658459] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.658827] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 749.660039] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 749.660039] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with 
opID=oslo.vmware-15879787-fe49-4853-9efc-f06982a97ec3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.665013] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Waiting for the task: (returnval){ [ 749.665013] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5213e0e7-3458-ce67-eb2d-f0e8512cac51" [ 749.665013] env[68217]: _type = "Task" [ 749.665013] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 749.674169] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5213e0e7-3458-ce67-eb2d-f0e8512cac51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 749.754330] env[68217]: DEBUG nova.compute.utils [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 749.756557] env[68217]: DEBUG nova.compute.manager [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 749.756913] env[68217]: DEBUG nova.network.neutron [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 749.800423] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c1479301-2958-4b47-8fdc-21f06c8dba3b tempest-ServersAdminNegativeTestJSON-165429239 tempest-ServersAdminNegativeTestJSON-165429239-project-member] Lock "4f4dc254-8e4f-4c5f-a2a8-eef6230825c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 54.443s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.800423] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e8209f3f-2408-46cb-bd90-245675f74b55 tempest-ImagesOneServerTestJSON-1813475746 tempest-ImagesOneServerTestJSON-1813475746-project-member] Lock "f748cf37-6605-49a2-a418-51667a0fac4a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.197s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.815865] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d77ff447-b493-45d0-a414-320be2795f50 tempest-InstanceActionsTestJSON-1862365545 tempest-InstanceActionsTestJSON-1862365545-project-member] Lock "a4a88f10-937a-4fa6-aa15-eb7f669e77d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 52.252s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 749.873160] env[68217]: DEBUG nova.policy [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '96d4c4affb734e3c9e36c8d028f1b42f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'abe88ad43d2c4fd681e7d2aa42c7d362', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 749.961039] env[68217]: DEBUG oslo_vmware.api [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961034, 'name': PowerOnVM_Task, 'duration_secs': 0.496736} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 749.961474] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 749.964032] env[68217]: INFO nova.compute.manager [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Took 7.61 seconds to spawn the instance on the hypervisor. 
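[editor's note] The PowerOnVM_Task entries around this point follow the usual oslo.vmware invoke-then-poll pattern: an asynchronous *_Task method is invoked through the session, then wait_for_task polls progress (the "progress is 0%" lines) until it completes and reports duration_secs. A minimal sketch of that pattern follows; the host, credentials, and vm_ref lookup are placeholders for illustration only, not values taken from this log.

    # Sketch of the oslo.vmware invoke-then-poll pattern seen in the trace.
    # Endpoint, credentials, and the moref value are illustrative placeholders.
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'vc.example.com', 'user', 'secret',
        api_retry_count=10,       # retries on transient API faults
        task_poll_interval=0.5,   # seconds between task progress polls
    )

    # Build a managed object reference for a VM (value is hypothetical).
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Invoke the asynchronous vSphere task, then block until it finishes;
    # wait_for_task polls progress and raises if the task ends in error.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)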
[ 749.964032] env[68217]: DEBUG nova.compute.manager [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 749.964032] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89d1fdcc-1bad-415b-b3b0-bbdc12f12dab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.009824] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Acquiring lock "refresh_cache-d0d8ed27-003e-43e2-8a07-041420a2c758" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.010142] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Acquired lock "refresh_cache-d0d8ed27-003e-43e2-8a07-041420a2c758" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.010546] env[68217]: DEBUG nova.network.neutron [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 750.161461] env[68217]: INFO nova.compute.manager [-] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Took 1.23 seconds to deallocate network for instance. [ 750.179339] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5213e0e7-3458-ce67-eb2d-f0e8512cac51, 'name': SearchDatastore_Task, 'duration_secs': 0.018713} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.179696] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.180319] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 750.180655] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 750.180949] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 750.181195] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 750.183377] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a812f7a7-e3b0-4e5a-abd3-446483ae5756 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.194192] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 750.194378] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 750.195109] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-040e5ade-7ffe-4038-a5ce-c26d1d2a0b20 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.201986] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Waiting for the task: (returnval){ [ 750.201986] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]521c57e1-cf3c-3b0a-8009-285e962da032" [ 750.201986] env[68217]: _type = "Task" [ 750.201986] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.206735] env[68217]: DEBUG nova.network.neutron [req-e6fec17d-2aaa-4d1c-91a1-456298496225 req-4c8a6b31-5354-40c3-a358-0fedf636fd45 service nova] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Updated VIF entry in instance network info cache for port d0d0d745-839e-4300-96dc-96e3be561179. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 750.207151] env[68217]: DEBUG nova.network.neutron [req-e6fec17d-2aaa-4d1c-91a1-456298496225 req-4c8a6b31-5354-40c3-a358-0fedf636fd45 service nova] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Updating instance_info_cache with network_info: [{"id": "d0d0d745-839e-4300-96dc-96e3be561179", "address": "fa:16:3e:aa:b2:f1", "network": {"id": "6ae903e7-df2e-45a9-9ce4-f7ff542dfc17", "bridge": "br-int", "label": "tempest-ServerGroupTestJSON-1117114126-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0e8c4e14c1fc4f9998c2af7a7169a1e7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "cd098b1c-636f-492d-b5ae-037cb0cae454", "external-id": "nsx-vlan-transportzone-377", "segmentation_id": 377, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd0d0d745-83", "ovs_interfaceid": "d0d0d745-839e-4300-96dc-96e3be561179", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.211703] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521c57e1-cf3c-3b0a-8009-285e962da032, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.257907] env[68217]: DEBUG nova.compute.manager [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 750.484370] env[68217]: INFO nova.compute.manager [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Took 61.30 seconds to build instance. [ 750.559185] env[68217]: DEBUG nova.network.neutron [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 750.671450] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 750.722239] env[68217]: DEBUG oslo_concurrency.lockutils [req-e6fec17d-2aaa-4d1c-91a1-456298496225 req-4c8a6b31-5354-40c3-a358-0fedf636fd45 service nova] Releasing lock "refresh_cache-71243775-e8df-4cc5-85c9-d64a244b4426" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.722570] env[68217]: DEBUG nova.compute.manager [req-e6fec17d-2aaa-4d1c-91a1-456298496225 req-4c8a6b31-5354-40c3-a358-0fedf636fd45 service nova] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Received event network-vif-deleted-b72a88d5-3b2e-461c-be4a-193dde782c4c {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 750.722672] env[68217]: INFO nova.compute.manager [req-e6fec17d-2aaa-4d1c-91a1-456298496225 req-4c8a6b31-5354-40c3-a358-0fedf636fd45 service nova] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Neutron deleted interface b72a88d5-3b2e-461c-be4a-193dde782c4c; detaching it from the instance and deleting it from the info cache [ 750.722815] env[68217]: DEBUG nova.network.neutron [req-e6fec17d-2aaa-4d1c-91a1-456298496225 req-4c8a6b31-5354-40c3-a358-0fedf636fd45 service nova] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.724507] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521c57e1-cf3c-3b0a-8009-285e962da032, 'name': SearchDatastore_Task, 'duration_secs': 0.01852} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.728294] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c79150e-1d00-4420-8b16-ffd4d6fad3ec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.729853] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-17e341bc-5620-48ba-b72b-57f585a34f9a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.735873] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Waiting for the task: (returnval){ [ 750.735873] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c2979c-b5d5-3422-644d-f156d026ce62" [ 750.735873] env[68217]: _type = "Task" [ 750.735873] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.741438] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-999b9867-826e-479c-aa39-4aa8cc072686 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.753558] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c2979c-b5d5-3422-644d-f156d026ce62, 'name': SearchDatastore_Task, 'duration_secs': 0.010288} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 750.781270] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 750.781557] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 71243775-e8df-4cc5-85c9-d64a244b4426/71243775-e8df-4cc5-85c9-d64a244b4426.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 750.786414] env[68217]: DEBUG nova.network.neutron [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Updating instance_info_cache with network_info: [{"id": "bde1de37-ba7a-4f49-94b6-85acc11e39a6", "address": "fa:16:3e:47:42:b4", "network": {"id": "cedd6389-aede-436b-a684-49896e5db27a", "bridge": "br-int", "label": "tempest-ServersTestJSON-66859414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fcea47a290440bcb11f3f962f8e6de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93c5b7ce-4c84-40bc-884c-b2453e0eee69", "external-id": "nsx-vlan-transportzone-882", "segmentation_id": 882, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbde1de37-ba", "ovs_interfaceid": "bde1de37-ba7a-4f49-94b6-85acc11e39a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.787829] env[68217]: DEBUG nova.network.neutron [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Successfully created port: e5088774-710c-4aa5-bfe4-b2e2fdf7edc9 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 750.789689] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c5844b5-0bc4-45a0-81bc-0ccdb10f5914 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.792577] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b34e14-5d06-44ea-bd1f-d0c21ac86d1b {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.802093] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01a7857e-ed4c-49e8-9a48-f1006e15e18b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.808021] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Waiting for the task: (returnval){ [ 750.808021] env[68217]: value = "task-2961035" [ 750.808021] env[68217]: _type = "Task" [ 750.808021] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 750.820052] env[68217]: DEBUG nova.compute.provider_tree [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 750.828551] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': task-2961035, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 750.986139] env[68217]: DEBUG oslo_concurrency.lockutils [None req-38185dd0-b934-4915-b335-d1cd104d74cd tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "580e6909-7d05-447a-a378-f0b8b71f059a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 119.952s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.226703] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d4eb6c4f-e1b7-4c7d-936b-9717339b80db {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.243981] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92d2303-2513-4e0e-9060-ad78105f530e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.275899] env[68217]: DEBUG nova.compute.manager [req-e6fec17d-2aaa-4d1c-91a1-456298496225 req-4c8a6b31-5354-40c3-a358-0fedf636fd45 service nova] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Detach interface failed, port_id=b72a88d5-3b2e-461c-be4a-193dde782c4c, reason: Instance 3d03e0b7-0469-4041-a7d5-7768326eb3b5 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 751.296938] env[68217]: DEBUG nova.compute.manager [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 751.301928] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Releasing lock "refresh_cache-d0d8ed27-003e-43e2-8a07-041420a2c758" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 751.303492] env[68217]: DEBUG nova.compute.manager [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Instance network_info: |[{"id": "bde1de37-ba7a-4f49-94b6-85acc11e39a6", "address": "fa:16:3e:47:42:b4", "network": {"id": "cedd6389-aede-436b-a684-49896e5db27a", "bridge": "br-int", "label": "tempest-ServersTestJSON-66859414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fcea47a290440bcb11f3f962f8e6de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93c5b7ce-4c84-40bc-884c-b2453e0eee69", "external-id": "nsx-vlan-transportzone-882", "segmentation_id": 882, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbde1de37-ba", "ovs_interfaceid": "bde1de37-ba7a-4f49-94b6-85acc11e39a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 751.305032] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:47:42:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '93c5b7ce-4c84-40bc-884c-b2453e0eee69', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bde1de37-ba7a-4f49-94b6-85acc11e39a6', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 751.312257] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Creating folder: Project (3fcea47a290440bcb11f3f962f8e6de5). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 751.312651] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7c168dfd-5765-4ec2-85b2-02210f38bef6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.326021] env[68217]: DEBUG nova.scheduler.client.report [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 751.331720] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': task-2961035, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522463} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.336454] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 71243775-e8df-4cc5-85c9-d64a244b4426/71243775-e8df-4cc5-85c9-d64a244b4426.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 751.336454] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 751.336454] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Created folder: Project (3fcea47a290440bcb11f3f962f8e6de5) in parent group-v594094. [ 751.336454] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Creating folder: Instances. Parent ref: group-v594224. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 751.336957] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-139dc751-5ff8-44ed-ac41-e5882d4a8e39 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.339756] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-72cdd3aa-f50b-435d-b6b8-9f5b0d7fa630 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.346550] env[68217]: DEBUG nova.virt.hardware [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 751.346958] env[68217]: DEBUG nova.virt.hardware [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 751.347440] env[68217]: DEBUG nova.virt.hardware [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 751.347775] env[68217]: DEBUG nova.virt.hardware [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 751.348052] env[68217]: DEBUG nova.virt.hardware [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 751.348324] env[68217]: DEBUG nova.virt.hardware [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 751.348676] env[68217]: DEBUG nova.virt.hardware [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 
tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 751.349299] env[68217]: DEBUG nova.virt.hardware [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 751.349299] env[68217]: DEBUG nova.virt.hardware [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 751.349299] env[68217]: DEBUG nova.virt.hardware [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 751.349492] env[68217]: DEBUG nova.virt.hardware [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 751.350432] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7345ce2f-28e7-4f5c-960d-3263f56b333e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.356555] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Created folder: Instances in parent group-v594224. [ 751.357520] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 751.357520] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Waiting for the task: (returnval){ [ 751.357520] env[68217]: value = "task-2961037" [ 751.357520] env[68217]: _type = "Task" [ 751.357520] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.358084] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 751.359621] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c7859e4f-0c65-4750-be67-e72d051d4159 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.388080] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e34633b1-29c4-424a-b786-59eda066c15e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.395884] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': task-2961037, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.396100] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 751.396100] env[68217]: value = "task-2961039" [ 751.396100] env[68217]: _type = "Task" [ 751.396100] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.413539] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961039, 'name': CreateVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.489213] env[68217]: DEBUG nova.compute.manager [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 751.598177] env[68217]: DEBUG nova.compute.manager [req-7185be90-b8c2-4446-adaf-4ca049f1cba6 req-1f264fe4-61d3-42b5-a683-17a271dbb5e4 service nova] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Received event network-changed-bde1de37-ba7a-4f49-94b6-85acc11e39a6 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 751.598475] env[68217]: DEBUG nova.compute.manager [req-7185be90-b8c2-4446-adaf-4ca049f1cba6 req-1f264fe4-61d3-42b5-a683-17a271dbb5e4 service nova] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Refreshing instance network info cache due to event network-changed-bde1de37-ba7a-4f49-94b6-85acc11e39a6. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 751.598751] env[68217]: DEBUG oslo_concurrency.lockutils [req-7185be90-b8c2-4446-adaf-4ca049f1cba6 req-1f264fe4-61d3-42b5-a683-17a271dbb5e4 service nova] Acquiring lock "refresh_cache-d0d8ed27-003e-43e2-8a07-041420a2c758" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.600785] env[68217]: DEBUG oslo_concurrency.lockutils [req-7185be90-b8c2-4446-adaf-4ca049f1cba6 req-1f264fe4-61d3-42b5-a683-17a271dbb5e4 service nova] Acquired lock "refresh_cache-d0d8ed27-003e-43e2-8a07-041420a2c758" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.601039] env[68217]: DEBUG nova.network.neutron [req-7185be90-b8c2-4446-adaf-4ca049f1cba6 req-1f264fe4-61d3-42b5-a683-17a271dbb5e4 service nova] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Refreshing network info cache for port bde1de37-ba7a-4f49-94b6-85acc11e39a6 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 751.835882] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.590s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.836302] env[68217]: DEBUG nova.compute.manager [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 751.841993] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 48.496s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.841993] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 751.844204] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 47.791s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 751.849543] env[68217]: INFO nova.compute.claims [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 751.872863] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': task-2961037, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066018} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.872968] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 751.874627] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5b9ddd-52a2-4b8a-8d1b-0dd61931567d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.904895] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Reconfiguring VM instance instance-00000029 to attach disk [datastore1] 71243775-e8df-4cc5-85c9-d64a244b4426/71243775-e8df-4cc5-85c9-d64a244b4426.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 751.906358] env[68217]: INFO nova.scheduler.client.report [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Deleted allocations for instance dc45d268-7a7f-4e65-b6fa-942ddba69b03 [ 751.909293] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3baaeabd-1c3e-415d-91dc-34d511e99b39 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.937180] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961039, 'name': CreateVM_Task, 'duration_secs': 0.475467} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 751.938222] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 751.938449] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Waiting for the task: (returnval){ [ 751.938449] env[68217]: value = "task-2961040" [ 751.938449] env[68217]: _type = "Task" [ 751.938449] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.939126] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 751.939278] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 751.939578] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 751.939932] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c8504ed6-961e-44e2-a904-c909f893328f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.948248] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Waiting for the task: (returnval){ [ 751.948248] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e50e17-567a-1edd-8740-cb59e6068354" [ 751.948248] env[68217]: _type = "Task" [ 751.948248] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 751.952612] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': task-2961040, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 751.962566] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e50e17-567a-1edd-8740-cb59e6068354, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.021455] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 752.355589] env[68217]: DEBUG nova.compute.utils [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 752.356989] env[68217]: DEBUG nova.compute.manager [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 752.357159] env[68217]: DEBUG nova.network.neutron [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 752.435370] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a1a8b46c-7baa-4364-897a-f181786b4842 tempest-ServerTagsTestJSON-526061362 tempest-ServerTagsTestJSON-526061362-project-member] Lock "dc45d268-7a7f-4e65-b6fa-942ddba69b03" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 52.558s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 752.452522] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': task-2961040, 'name': ReconfigVM_Task, 'duration_secs': 0.317034} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.456948] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Reconfigured VM instance instance-00000029 to attach disk [datastore1] 71243775-e8df-4cc5-85c9-d64a244b4426/71243775-e8df-4cc5-85c9-d64a244b4426.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 752.457428] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-939cdc0d-3043-41e7-b4dd-29fde5a0e8b6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.467050] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e50e17-567a-1edd-8740-cb59e6068354, 'name': SearchDatastore_Task, 'duration_secs': 0.020536} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.467423] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 752.467423] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 752.467716] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.469271] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.469737] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 752.470159] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Waiting for the task: (returnval){ [ 752.470159] env[68217]: value = 
"task-2961041" [ 752.470159] env[68217]: _type = "Task" [ 752.470159] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.470377] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-980891ae-5b1a-40a7-a21b-f73488deba6b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.483457] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': task-2961041, 'name': Rename_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.487238] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 752.487238] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 752.487238] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b9ad77d-fde0-4899-9b97-c464ce4808cb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.494424] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Waiting for the task: (returnval){ [ 752.494424] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5238ddf1-446f-1bae-5963-a9e84cace964" [ 752.494424] env[68217]: _type = "Task" [ 752.494424] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.495824] env[68217]: DEBUG nova.policy [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b9d8e03f73df429fb7ff9628592fefbc', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7f42746a8af4661ace1f67c4279c3b8', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 752.509011] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5238ddf1-446f-1bae-5963-a9e84cace964, 'name': SearchDatastore_Task, 'duration_secs': 0.013769} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.509968] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ed60ffb-2ff8-42a3-a4fa-695ef138ab59 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.515754] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Waiting for the task: (returnval){ [ 752.515754] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52249084-b678-e9b5-be67-1dc24a2ffd57" [ 752.515754] env[68217]: _type = "Task" [ 752.515754] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 752.524606] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52249084-b678-e9b5-be67-1dc24a2ffd57, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 752.603530] env[68217]: DEBUG nova.network.neutron [req-7185be90-b8c2-4446-adaf-4ca049f1cba6 req-1f264fe4-61d3-42b5-a683-17a271dbb5e4 service nova] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Updated VIF entry in instance network info cache for port bde1de37-ba7a-4f49-94b6-85acc11e39a6. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 752.603784] env[68217]: DEBUG nova.network.neutron [req-7185be90-b8c2-4446-adaf-4ca049f1cba6 req-1f264fe4-61d3-42b5-a683-17a271dbb5e4 service nova] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Updating instance_info_cache with network_info: [{"id": "bde1de37-ba7a-4f49-94b6-85acc11e39a6", "address": "fa:16:3e:47:42:b4", "network": {"id": "cedd6389-aede-436b-a684-49896e5db27a", "bridge": "br-int", "label": "tempest-ServersTestJSON-66859414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fcea47a290440bcb11f3f962f8e6de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93c5b7ce-4c84-40bc-884c-b2453e0eee69", "external-id": "nsx-vlan-transportzone-882", "segmentation_id": 882, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbde1de37-ba", "ovs_interfaceid": "bde1de37-ba7a-4f49-94b6-85acc11e39a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.713346] env[68217]: DEBUG nova.compute.manager [req-b40b1d8f-4b04-4c44-a678-ea5a62762fc6 req-622b4ca9-3de3-433b-80c0-86c5d84d796d service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Received event network-changed-9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2 {{(pid=68217) external_instance_event 
/opt/stack/nova/nova/compute/manager.py:11737}} [ 752.713496] env[68217]: DEBUG nova.compute.manager [req-b40b1d8f-4b04-4c44-a678-ea5a62762fc6 req-622b4ca9-3de3-433b-80c0-86c5d84d796d service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Refreshing instance network info cache due to event network-changed-9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 752.714121] env[68217]: DEBUG oslo_concurrency.lockutils [req-b40b1d8f-4b04-4c44-a678-ea5a62762fc6 req-622b4ca9-3de3-433b-80c0-86c5d84d796d service nova] Acquiring lock "refresh_cache-580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.714121] env[68217]: DEBUG oslo_concurrency.lockutils [req-b40b1d8f-4b04-4c44-a678-ea5a62762fc6 req-622b4ca9-3de3-433b-80c0-86c5d84d796d service nova] Acquired lock "refresh_cache-580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 752.716208] env[68217]: DEBUG nova.network.neutron [req-b40b1d8f-4b04-4c44-a678-ea5a62762fc6 req-622b4ca9-3de3-433b-80c0-86c5d84d796d service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Refreshing network info cache for port 9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 752.860490] env[68217]: DEBUG nova.compute.manager [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 752.986641] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': task-2961041, 'name': Rename_Task, 'duration_secs': 0.153471} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 752.987199] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 752.987199] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5692b8b2-2552-427a-856c-66c2b15aa43b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.995046] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Waiting for the task: (returnval){ [ 752.995046] env[68217]: value = "task-2961042" [ 752.995046] env[68217]: _type = "Task" [ 752.995046] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.009540] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': task-2961042, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.028694] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52249084-b678-e9b5-be67-1dc24a2ffd57, 'name': SearchDatastore_Task, 'duration_secs': 0.011243} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.028694] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.028939] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] d0d8ed27-003e-43e2-8a07-041420a2c758/d0d8ed27-003e-43e2-8a07-041420a2c758.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 753.032016] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5546611b-1c58-473b-82c9-9acb3880a602 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.037975] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Waiting for the task: (returnval){ [ 753.037975] env[68217]: value = "task-2961043" [ 753.037975] env[68217]: _type = "Task" [ 753.037975] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 753.047302] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': task-2961043, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.110948] env[68217]: DEBUG oslo_concurrency.lockutils [req-7185be90-b8c2-4446-adaf-4ca049f1cba6 req-1f264fe4-61d3-42b5-a683-17a271dbb5e4 service nova] Releasing lock "refresh_cache-d0d8ed27-003e-43e2-8a07-041420a2c758" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 753.319703] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143e4fa1-20c4-4ccc-b495-f62dc79295ad {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.328116] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-287f4245-4ffb-41a7-aac3-34ac10644d7c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.371035] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55c5d973-495a-4ff9-a964-84922214cf77 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.375633] env[68217]: INFO nova.virt.block_device [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Booting with volume 03332631-865e-4746-a213-a86fd1f1f4ef at /dev/sda [ 753.388344] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac98a749-37db-45b9-8eaa-e47c5c1696b9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.405817] env[68217]: DEBUG nova.compute.provider_tree [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 753.432272] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-685f9397-2a03-4342-b491-13b078da45ed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.445424] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afcb85af-b0dc-44fb-aea3-90f06d50f4ba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.480691] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-de46c632-fe07-4917-ad5e-a584a64119ee {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.493473] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-205efc7b-6d0e-4824-b64d-96c663a9dc85 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.517579] env[68217]: DEBUG oslo_vmware.api [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': task-2961042, 'name': PowerOnVM_Task, 'duration_secs': 
0.499168} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 753.517579] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 753.517813] env[68217]: INFO nova.compute.manager [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Took 6.36 seconds to spawn the instance on the hypervisor. [ 753.517954] env[68217]: DEBUG nova.compute.manager [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 753.521844] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5dba82e-d69b-480d-8053-f5eac112a2a1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.541841] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db6cdbe9-ab2b-4978-83b8-f96f840c6601 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.561617] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f188928-ea16-4c9b-bd95-e6c0702ac614 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.564782] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': task-2961043, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 753.579752] env[68217]: DEBUG nova.virt.block_device [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Updating existing volume attachment record: 17767a32-267e-4d08-9eb8-20118f83f9f2 {{(pid=68217) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 753.693467] env[68217]: DEBUG nova.network.neutron [req-b40b1d8f-4b04-4c44-a678-ea5a62762fc6 req-622b4ca9-3de3-433b-80c0-86c5d84d796d service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Updated VIF entry in instance network info cache for port 9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 753.693805] env[68217]: DEBUG nova.network.neutron [req-b40b1d8f-4b04-4c44-a678-ea5a62762fc6 req-622b4ca9-3de3-433b-80c0-86c5d84d796d service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Updating instance_info_cache with network_info: [{"id": "9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2", "address": "fa:16:3e:cd:e0:cd", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e834f9f-3d", "ovs_interfaceid": "9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.897447] env[68217]: DEBUG nova.network.neutron [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Successfully updated port: e5088774-710c-4aa5-bfe4-b2e2fdf7edc9 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 753.908701] env[68217]: DEBUG nova.scheduler.client.report [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 753.974319] env[68217]: DEBUG nova.network.neutron [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Successfully created port: dd5b95b3-32c1-4279-b996-ecf817d6418d {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 754.057978] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': task-2961043, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.695396} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.059981] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] d0d8ed27-003e-43e2-8a07-041420a2c758/d0d8ed27-003e-43e2-8a07-041420a2c758.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 754.060205] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 754.060444] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-40058a4a-d510-42cc-aa1d-3e5b86d7cf24 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.062694] env[68217]: INFO nova.compute.manager [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Took 61.75 seconds to build instance. [ 754.068752] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Waiting for the task: (returnval){ [ 754.068752] env[68217]: value = "task-2961044" [ 754.068752] env[68217]: _type = "Task" [ 754.068752] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.076703] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': task-2961044, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.199672] env[68217]: DEBUG oslo_concurrency.lockutils [req-b40b1d8f-4b04-4c44-a678-ea5a62762fc6 req-622b4ca9-3de3-433b-80c0-86c5d84d796d service nova] Releasing lock "refresh_cache-580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 754.271323] env[68217]: DEBUG nova.compute.manager [req-41a832a6-82d0-43ee-8cb3-42271483498d req-7a186b09-b10f-4cdd-a5e5-ad87d5ca41b8 service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Received event network-vif-plugged-e5088774-710c-4aa5-bfe4-b2e2fdf7edc9 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 754.271695] env[68217]: DEBUG oslo_concurrency.lockutils [req-41a832a6-82d0-43ee-8cb3-42271483498d req-7a186b09-b10f-4cdd-a5e5-ad87d5ca41b8 service nova] Acquiring lock "7ec30097-1151-4b0d-8226-e4d34ea7b3c9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 754.273321] env[68217]: DEBUG oslo_concurrency.lockutils [req-41a832a6-82d0-43ee-8cb3-42271483498d req-7a186b09-b10f-4cdd-a5e5-ad87d5ca41b8 service nova] Lock "7ec30097-1151-4b0d-8226-e4d34ea7b3c9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.273321] env[68217]: DEBUG oslo_concurrency.lockutils [req-41a832a6-82d0-43ee-8cb3-42271483498d req-7a186b09-b10f-4cdd-a5e5-ad87d5ca41b8 service nova] Lock "7ec30097-1151-4b0d-8226-e4d34ea7b3c9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.273321] env[68217]: DEBUG nova.compute.manager [req-41a832a6-82d0-43ee-8cb3-42271483498d req-7a186b09-b10f-4cdd-a5e5-ad87d5ca41b8 service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] No waiting events found dispatching network-vif-plugged-e5088774-710c-4aa5-bfe4-b2e2fdf7edc9 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 754.273321] env[68217]: WARNING nova.compute.manager [req-41a832a6-82d0-43ee-8cb3-42271483498d req-7a186b09-b10f-4cdd-a5e5-ad87d5ca41b8 service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Received unexpected event network-vif-plugged-e5088774-710c-4aa5-bfe4-b2e2fdf7edc9 for instance with vm_state building and task_state spawning. 
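[editor's note] Most of this trace is the oslo.vmware task lifecycle: an "Invoking <Something>_Task" SOAP call returns a task handle, the driver then blocks in wait_for_task while _poll_task repeatedly logs "progress is N%" until the final "completed successfully" entry with a duration_secs, and oslo.service's looping call keeps the worker waiting in the meantime. The snippet below is a minimal, hypothetical sketch of that poll loop in plain Python, included only to make the pattern in the log easier to follow; the TaskInfo fields, the poll_task callback, and the interval/timeout defaults are illustrative assumptions and are not the oslo.vmware API itself.

    # Hypothetical sketch of the poll loop behind the
    # "Waiting for the task ... to complete" /
    # "Task: {...} progress is N%" / "completed successfully" lines above.
    # Not the oslo.vmware implementation.
    import time
    import logging
    from dataclasses import dataclass
    from typing import Callable, Optional

    LOG = logging.getLogger(__name__)

    @dataclass
    class TaskInfo:
        task_id: str            # e.g. "task-2961043"
        name: str               # e.g. "CopyVirtualDisk_Task"
        state: str              # "running", "success" or "error"
        progress: int = 0       # percent complete
        error: Optional[str] = None

    def wait_for_task(poll_task: Callable[[], TaskInfo],
                      interval: float = 0.5,
                      timeout: float = 300.0) -> TaskInfo:
        """Poll a task until it succeeds, fails or times out,
        logging progress the way the trace above does."""
        deadline = time.monotonic() + timeout
        while True:
            info = poll_task()
            if info.state == "success":
                LOG.debug("Task %s (%s) completed successfully.",
                          info.task_id, info.name)
                return info
            if info.state == "error":
                raise RuntimeError(
                    f"Task {info.task_id} failed: {info.error}")
            LOG.debug("Task %s (%s) progress is %d%%.",
                      info.task_id, info.name, info.progress)
            if time.monotonic() >= deadline:
                raise TimeoutError(
                    f"Task {info.task_id} did not finish within {timeout}s")
            time.sleep(interval)

[editor's note] In the real service the equivalent loop is driven by an oslo.service looping call rather than a blocking sleep (hence the "Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return" entry earlier in the trace), but the observable behaviour is the same sequence of progress lines ending in a "completed successfully" record.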
[ 754.404159] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquiring lock "refresh_cache-7ec30097-1151-4b0d-8226-e4d34ea7b3c9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 754.404369] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquired lock "refresh_cache-7ec30097-1151-4b0d-8226-e4d34ea7b3c9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 754.404529] env[68217]: DEBUG nova.network.neutron [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 754.413826] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.570s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.414523] env[68217]: DEBUG nova.compute.manager [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 754.421457] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 47.399s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.421609] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.424359] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 40.619s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 754.425066] env[68217]: INFO nova.compute.claims [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 754.454298] env[68217]: INFO nova.scheduler.client.report [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Deleted allocations for instance 83d32dd6-2629-4451-a746-bf5270083e2a [ 754.567754] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e440aa2-7a0d-44fe-b6b9-f1eff33fb55b tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Lock "71243775-e8df-4cc5-85c9-d64a244b4426" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 118.135s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.581953] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': task-2961044, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.232521} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 754.584108] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 754.589658] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce0bbc0-2336-4acb-992a-1286151d81f9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.613612] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Reconfiguring VM instance instance-0000002a to attach disk [datastore1] d0d8ed27-003e-43e2-8a07-041420a2c758/d0d8ed27-003e-43e2-8a07-041420a2c758.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 754.614441] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19e057cd-2ea2-4a1f-8863-9e55b9dad2c7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.635308] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Waiting for the task: (returnval){ [ 754.635308] env[68217]: value = "task-2961045" [ 754.635308] env[68217]: _type = "Task" [ 754.635308] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.648327] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': task-2961045, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.924330] env[68217]: DEBUG nova.compute.utils [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 754.925585] env[68217]: DEBUG nova.compute.manager [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 754.929020] env[68217]: DEBUG nova.network.neutron [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 754.962896] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f24e9f74-11eb-4710-b047-b9c1778f7806 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "83d32dd6-2629-4451-a746-bf5270083e2a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 51.926s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 754.973759] env[68217]: DEBUG nova.network.neutron [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 755.004103] env[68217]: DEBUG nova.policy [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '959602cf63674fb1a4edccb4e452e614', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0522eaa6ebc48a28651f6b3bf1434f3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 755.070807] env[68217]: DEBUG nova.compute.manager [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 755.152323] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': task-2961045, 'name': ReconfigVM_Task, 'duration_secs': 0.306331} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.152608] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Reconfigured VM instance instance-0000002a to attach disk [datastore1] d0d8ed27-003e-43e2-8a07-041420a2c758/d0d8ed27-003e-43e2-8a07-041420a2c758.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 755.153646] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c6581466-ada6-4f48-89dc-f1a9d13aa15b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.161506] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Waiting for the task: (returnval){ [ 755.161506] env[68217]: value = "task-2961046" [ 755.161506] env[68217]: _type = "Task" [ 755.161506] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.170902] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': task-2961046, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.282558] env[68217]: DEBUG nova.network.neutron [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Updating instance_info_cache with network_info: [{"id": "e5088774-710c-4aa5-bfe4-b2e2fdf7edc9", "address": "fa:16:3e:60:a1:d8", "network": {"id": "1df67255-b55c-4745-9486-c3a61fbee22c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1634624222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abe88ad43d2c4fd681e7d2aa42c7d362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5088774-71", "ovs_interfaceid": "e5088774-710c-4aa5-bfe4-b2e2fdf7edc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.429270] env[68217]: DEBUG nova.compute.manager [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Start building block device mappings for 
instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 755.592462] env[68217]: DEBUG oslo_concurrency.lockutils [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.672627] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': task-2961046, 'name': Rename_Task, 'duration_secs': 0.165653} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.675224] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 755.675637] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5398507f-2fac-407d-91ba-7c9d0da495af {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.683805] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Waiting for the task: (returnval){ [ 755.683805] env[68217]: value = "task-2961047" [ 755.683805] env[68217]: _type = "Task" [ 755.683805] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.698073] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': task-2961047, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.702760] env[68217]: DEBUG nova.network.neutron [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Successfully created port: c4ffafa7-b375-4f41-90e8-0db42f248139 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 755.754165] env[68217]: DEBUG nova.compute.manager [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 755.755512] env[68217]: DEBUG nova.virt.hardware [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 755.755512] env[68217]: DEBUG nova.virt.hardware [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 755.755512] env[68217]: DEBUG nova.virt.hardware [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 755.755711] env[68217]: DEBUG nova.virt.hardware [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 755.755937] env[68217]: DEBUG nova.virt.hardware [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 755.756042] env[68217]: DEBUG nova.virt.hardware [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 755.756953] env[68217]: DEBUG nova.virt.hardware [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 755.756953] env[68217]: DEBUG nova.virt.hardware [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 755.756953] env[68217]: DEBUG nova.virt.hardware [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Got 1 possible 
topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 755.757257] env[68217]: DEBUG nova.virt.hardware [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 755.757293] env[68217]: DEBUG nova.virt.hardware [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 755.758398] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23d2d93-8504-4406-be56-a6ad2358e41c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.767448] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-282a42e3-a9bb-47a1-8a88-6dfbb2d076df {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.785544] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Releasing lock "refresh_cache-7ec30097-1151-4b0d-8226-e4d34ea7b3c9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 755.786239] env[68217]: DEBUG nova.compute.manager [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Instance network_info: |[{"id": "e5088774-710c-4aa5-bfe4-b2e2fdf7edc9", "address": "fa:16:3e:60:a1:d8", "network": {"id": "1df67255-b55c-4745-9486-c3a61fbee22c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1634624222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abe88ad43d2c4fd681e7d2aa42c7d362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5088774-71", "ovs_interfaceid": "e5088774-710c-4aa5-bfe4-b2e2fdf7edc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 755.786982] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 
7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:a1:d8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '863474bc-a24a-4823-828c-580a187829e3', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e5088774-710c-4aa5-bfe4-b2e2fdf7edc9', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 755.795521] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 755.799525] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 755.800035] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dbca9721-2d29-46fb-8a6d-1de17185bd51 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.825780] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 755.825780] env[68217]: value = "task-2961048" [ 755.825780] env[68217]: _type = "Task" [ 755.825780] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.840813] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961048, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.852791] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Acquiring lock "71243775-e8df-4cc5-85c9-d64a244b4426" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.852791] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Lock "71243775-e8df-4cc5-85c9-d64a244b4426" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.852867] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Acquiring lock "71243775-e8df-4cc5-85c9-d64a244b4426-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 755.853043] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Lock "71243775-e8df-4cc5-85c9-d64a244b4426-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 755.853247] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Lock "71243775-e8df-4cc5-85c9-d64a244b4426-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 755.858285] env[68217]: INFO nova.compute.manager [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Terminating instance [ 755.931790] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b2cd734-db2a-4d27-b5b6-b0fc0a615041 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.944595] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba9dc89-28c2-4baf-96f4-e2b7db74d43a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.989868] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-434daec0-a182-471d-a3cf-49da61f6278a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.998677] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx 
with opID=oslo.vmware-817d1074-9661-4337-903e-21b942de019b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.016779] env[68217]: DEBUG nova.compute.provider_tree [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.198580] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': task-2961047, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.338387] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961048, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.365732] env[68217]: DEBUG nova.compute.manager [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 756.367955] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 756.367955] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e78dae7-4188-4729-8fae-1890c189fc97 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.382931] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 756.382931] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9203926a-65ad-4b9d-91e1-f62606f322cf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.391021] env[68217]: DEBUG oslo_vmware.api [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Waiting for the task: (returnval){ [ 756.391021] env[68217]: value = "task-2961049" [ 756.391021] env[68217]: _type = "Task" [ 756.391021] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.398219] env[68217]: DEBUG oslo_vmware.api [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': task-2961049, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.440952] env[68217]: DEBUG nova.compute.manager [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 756.466894] env[68217]: DEBUG nova.virt.hardware [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 756.467466] env[68217]: DEBUG nova.virt.hardware [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 756.467819] env[68217]: DEBUG nova.virt.hardware [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 756.468141] env[68217]: DEBUG nova.virt.hardware [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 756.468490] env[68217]: DEBUG nova.virt.hardware [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 756.468727] env[68217]: DEBUG nova.virt.hardware [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 756.469060] env[68217]: DEBUG nova.virt.hardware [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 756.469336] env[68217]: DEBUG nova.virt.hardware [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 756.469618] env[68217]: DEBUG nova.virt.hardware [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 756.469902] env[68217]: DEBUG nova.virt.hardware [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 756.470368] env[68217]: DEBUG nova.virt.hardware [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 756.471450] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05a8057d-093e-4489-af8d-05d9c50b8189 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.484109] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68247374-6245-4767-86fc-209ba4d4c400 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.498105] env[68217]: DEBUG nova.network.neutron [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Successfully updated port: dd5b95b3-32c1-4279-b996-ecf817d6418d {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 756.520442] env[68217]: DEBUG nova.scheduler.client.report [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 756.702304] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': task-2961047, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.738636] env[68217]: DEBUG nova.compute.manager [req-a37550f3-7c26-430c-ac08-89c42cf6e250 req-84b37ca3-8ebe-4086-a95c-4b7670b00766 service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Received event network-changed-e5088774-710c-4aa5-bfe4-b2e2fdf7edc9 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 756.738819] env[68217]: DEBUG nova.compute.manager [req-a37550f3-7c26-430c-ac08-89c42cf6e250 req-84b37ca3-8ebe-4086-a95c-4b7670b00766 service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Refreshing instance network info cache due to event network-changed-e5088774-710c-4aa5-bfe4-b2e2fdf7edc9. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 756.741290] env[68217]: DEBUG oslo_concurrency.lockutils [req-a37550f3-7c26-430c-ac08-89c42cf6e250 req-84b37ca3-8ebe-4086-a95c-4b7670b00766 service nova] Acquiring lock "refresh_cache-7ec30097-1151-4b0d-8226-e4d34ea7b3c9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.741466] env[68217]: DEBUG oslo_concurrency.lockutils [req-a37550f3-7c26-430c-ac08-89c42cf6e250 req-84b37ca3-8ebe-4086-a95c-4b7670b00766 service nova] Acquired lock "refresh_cache-7ec30097-1151-4b0d-8226-e4d34ea7b3c9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.741642] env[68217]: DEBUG nova.network.neutron [req-a37550f3-7c26-430c-ac08-89c42cf6e250 req-84b37ca3-8ebe-4086-a95c-4b7670b00766 service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Refreshing network info cache for port e5088774-710c-4aa5-bfe4-b2e2fdf7edc9 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 756.838607] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961048, 'name': CreateVM_Task, 'duration_secs': 0.86965} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.840015] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 756.840015] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.840142] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 756.840408] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 756.840940] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36a56d5e-e612-4ad3-a8ff-c4d8a4184432 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.846745] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 756.846745] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52509251-128b-9d7c-0ced-688c8aa128f3" [ 756.846745] env[68217]: _type = "Task" [ 756.846745] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.856648] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52509251-128b-9d7c-0ced-688c8aa128f3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.902982] env[68217]: DEBUG oslo_vmware.api [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': task-2961049, 'name': PowerOffVM_Task, 'duration_secs': 0.288991} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.903551] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 756.903724] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 756.903986] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b95421b2-d9bc-4a4b-b7b5-c8b7d84fb079 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.977599] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 756.977921] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 756.977921] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Deleting the datastore file [datastore1] 71243775-e8df-4cc5-85c9-d64a244b4426 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 756.978151] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f1b4a40a-90d4-4f53-a659-1e767254327b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.984837] env[68217]: DEBUG oslo_vmware.api [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Waiting for the task: (returnval){ [ 756.984837] env[68217]: value = "task-2961051" [ 756.984837] env[68217]: _type = "Task" [ 756.984837] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.001474] env[68217]: DEBUG oslo_vmware.api [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': task-2961051, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.002078] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Acquiring lock "refresh_cache-0552d616-a406-4dfa-8a70-82f39fb98bbc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.002244] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Acquired lock "refresh_cache-0552d616-a406-4dfa-8a70-82f39fb98bbc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.002298] env[68217]: DEBUG nova.network.neutron [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 757.028578] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.605s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.029138] env[68217]: DEBUG nova.compute.manager [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 757.031878] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 40.124s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.032095] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.034291] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 37.345s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 757.036343] env[68217]: INFO nova.compute.claims [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 757.079973] env[68217]: INFO nova.scheduler.client.report [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Deleted allocations for instance db4cf157-9511-423c-aa41-433af8d92b48 [ 757.198177] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': task-2961047, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.362052] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52509251-128b-9d7c-0ced-688c8aa128f3, 'name': SearchDatastore_Task, 'duration_secs': 0.026232} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.362391] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 757.362635] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 757.364606] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.364606] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 757.364606] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 757.364606] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2a9fe4d9-3e92-4462-9769-e0b9f71da09f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.374721] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 757.374877] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 757.377936] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-92e4591b-6bf7-41b6-aaf9-adbbdf608e0b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.383992] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 757.383992] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5288c6a9-b517-91a8-8c5a-711189602401" [ 757.383992] env[68217]: _type = "Task" [ 757.383992] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.392396] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5288c6a9-b517-91a8-8c5a-711189602401, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.497649] env[68217]: DEBUG oslo_vmware.api [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Task: {'id': task-2961051, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.378334} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.498960] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 757.498960] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 757.498960] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 757.498960] env[68217]: INFO nova.compute.manager [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Took 1.13 seconds to destroy the instance on the hypervisor. [ 757.498960] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 757.499238] env[68217]: DEBUG nova.compute.manager [-] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 757.499238] env[68217]: DEBUG nova.network.neutron [-] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 757.543669] env[68217]: DEBUG nova.compute.utils [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 757.559222] env[68217]: DEBUG nova.compute.manager [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 757.559222] env[68217]: DEBUG nova.network.neutron [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 757.592900] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4dafe6cf-decb-40dc-9c72-74b603fa5f62 tempest-ServersTestManualDisk-1774495805 tempest-ServersTestManualDisk-1774495805-project-member] Lock "db4cf157-9511-423c-aa41-433af8d92b48" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 44.490s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 757.636396] env[68217]: DEBUG nova.network.neutron [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 757.704240] env[68217]: DEBUG oslo_vmware.api [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': task-2961047, 'name': PowerOnVM_Task, 'duration_secs': 1.696924} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.707032] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 757.707032] env[68217]: INFO nova.compute.manager [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Took 9.03 seconds to spawn the instance on the hypervisor. 
[ 757.707032] env[68217]: DEBUG nova.compute.manager [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 757.709503] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33589bce-6ba8-4e6c-b37d-8099eb487b75 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.762429] env[68217]: DEBUG nova.network.neutron [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Successfully updated port: c4ffafa7-b375-4f41-90e8-0db42f248139 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 757.796505] env[68217]: DEBUG nova.policy [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5cec0c4325164a28a663f79559271d79', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '023b801c234d47d79cb57ea73058e81c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 757.895572] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5288c6a9-b517-91a8-8c5a-711189602401, 'name': SearchDatastore_Task, 'duration_secs': 0.035963} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.896648] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20be8f47-8453-4717-a0c3-33cae283a05e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.903875] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 757.903875] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524fcc81-3a7e-9b39-83b3-e629aaccafc8" [ 757.903875] env[68217]: _type = "Task" [ 757.903875] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.912071] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524fcc81-3a7e-9b39-83b3-e629aaccafc8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.023477] env[68217]: DEBUG nova.network.neutron [req-a37550f3-7c26-430c-ac08-89c42cf6e250 req-84b37ca3-8ebe-4086-a95c-4b7670b00766 service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Updated VIF entry in instance network info cache for port e5088774-710c-4aa5-bfe4-b2e2fdf7edc9. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 758.023832] env[68217]: DEBUG nova.network.neutron [req-a37550f3-7c26-430c-ac08-89c42cf6e250 req-84b37ca3-8ebe-4086-a95c-4b7670b00766 service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Updating instance_info_cache with network_info: [{"id": "e5088774-710c-4aa5-bfe4-b2e2fdf7edc9", "address": "fa:16:3e:60:a1:d8", "network": {"id": "1df67255-b55c-4745-9486-c3a61fbee22c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1634624222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abe88ad43d2c4fd681e7d2aa42c7d362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5088774-71", "ovs_interfaceid": "e5088774-710c-4aa5-bfe4-b2e2fdf7edc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.054283] env[68217]: DEBUG nova.compute.manager [req-76f22232-beeb-45cf-95c8-af3313d1a48f req-00a30177-5fca-441e-9333-051ffb362428 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Received event network-vif-plugged-c4ffafa7-b375-4f41-90e8-0db42f248139 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 758.054490] env[68217]: DEBUG oslo_concurrency.lockutils [req-76f22232-beeb-45cf-95c8-af3313d1a48f req-00a30177-5fca-441e-9333-051ffb362428 service nova] Acquiring lock "d14026b1-84dd-430e-be94-94dcb1f47473-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.054687] env[68217]: DEBUG oslo_concurrency.lockutils [req-76f22232-beeb-45cf-95c8-af3313d1a48f req-00a30177-5fca-441e-9333-051ffb362428 service nova] Lock "d14026b1-84dd-430e-be94-94dcb1f47473-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.054844] env[68217]: DEBUG oslo_concurrency.lockutils [req-76f22232-beeb-45cf-95c8-af3313d1a48f req-00a30177-5fca-441e-9333-051ffb362428 service nova] Lock "d14026b1-84dd-430e-be94-94dcb1f47473-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.055027] env[68217]: DEBUG nova.compute.manager [req-76f22232-beeb-45cf-95c8-af3313d1a48f req-00a30177-5fca-441e-9333-051ffb362428 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] No waiting events found dispatching network-vif-plugged-c4ffafa7-b375-4f41-90e8-0db42f248139 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 758.055190] env[68217]: WARNING nova.compute.manager [req-76f22232-beeb-45cf-95c8-af3313d1a48f req-00a30177-5fca-441e-9333-051ffb362428 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Received unexpected event network-vif-plugged-c4ffafa7-b375-4f41-90e8-0db42f248139 for instance with vm_state building and task_state spawning. [ 758.058780] env[68217]: DEBUG nova.compute.manager [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 758.194524] env[68217]: DEBUG nova.network.neutron [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Updating instance_info_cache with network_info: [{"id": "dd5b95b3-32c1-4279-b996-ecf817d6418d", "address": "fa:16:3e:b8:ff:00", "network": {"id": "ab4806dd-5eba-46c0-8f5f-2c2304fb3bfa", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1073338723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7f42746a8af4661ace1f67c4279c3b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd5b95b3-32", "ovs_interfaceid": "dd5b95b3-32c1-4279-b996-ecf817d6418d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.230487] env[68217]: INFO nova.compute.manager [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Took 63.84 seconds to build instance. 
[ 758.266797] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.266797] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquired lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 758.266797] env[68217]: DEBUG nova.network.neutron [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 758.419928] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524fcc81-3a7e-9b39-83b3-e629aaccafc8, 'name': SearchDatastore_Task, 'duration_secs': 0.010023} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.423619] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 758.423619] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 7ec30097-1151-4b0d-8226-e4d34ea7b3c9/7ec30097-1151-4b0d-8226-e4d34ea7b3c9.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 758.424771] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c82b1792-c5b4-4ff5-937a-749c1871e525 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.431768] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 758.431768] env[68217]: value = "task-2961052" [ 758.431768] env[68217]: _type = "Task" [ 758.431768] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.440282] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961052, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.535792] env[68217]: DEBUG oslo_concurrency.lockutils [req-a37550f3-7c26-430c-ac08-89c42cf6e250 req-84b37ca3-8ebe-4086-a95c-4b7670b00766 service nova] Releasing lock "refresh_cache-7ec30097-1151-4b0d-8226-e4d34ea7b3c9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 758.536067] env[68217]: DEBUG nova.compute.manager [req-a37550f3-7c26-430c-ac08-89c42cf6e250 req-84b37ca3-8ebe-4086-a95c-4b7670b00766 service nova] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Received event network-vif-plugged-dd5b95b3-32c1-4279-b996-ecf817d6418d {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 758.536266] env[68217]: DEBUG oslo_concurrency.lockutils [req-a37550f3-7c26-430c-ac08-89c42cf6e250 req-84b37ca3-8ebe-4086-a95c-4b7670b00766 service nova] Acquiring lock "0552d616-a406-4dfa-8a70-82f39fb98bbc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 758.536460] env[68217]: DEBUG oslo_concurrency.lockutils [req-a37550f3-7c26-430c-ac08-89c42cf6e250 req-84b37ca3-8ebe-4086-a95c-4b7670b00766 service nova] Lock "0552d616-a406-4dfa-8a70-82f39fb98bbc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 758.536618] env[68217]: DEBUG oslo_concurrency.lockutils [req-a37550f3-7c26-430c-ac08-89c42cf6e250 req-84b37ca3-8ebe-4086-a95c-4b7670b00766 service nova] Lock "0552d616-a406-4dfa-8a70-82f39fb98bbc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.536779] env[68217]: DEBUG nova.compute.manager [req-a37550f3-7c26-430c-ac08-89c42cf6e250 req-84b37ca3-8ebe-4086-a95c-4b7670b00766 service nova] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] No waiting events found dispatching network-vif-plugged-dd5b95b3-32c1-4279-b996-ecf817d6418d {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 758.536945] env[68217]: WARNING nova.compute.manager [req-a37550f3-7c26-430c-ac08-89c42cf6e250 req-84b37ca3-8ebe-4086-a95c-4b7670b00766 service nova] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Received unexpected event network-vif-plugged-dd5b95b3-32c1-4279-b996-ecf817d6418d for instance with vm_state building and task_state spawning. 
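The "Acquiring lock" / "acquired ... waited" / "released ... held" triplets above are emitted by oslo_concurrency.lockutils. A minimal sketch of the same two call patterns, with the lock names copied from the entries above purely for illustration:

from oslo_concurrency import lockutils

# Context-manager form, as used for the per-instance "refresh_cache-<uuid>" locks.
with lockutils.lock("refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473"):
    pass  # rebuild the instance network info cache while holding the lock

# Decorator form, as used for the "<uuid>-events" lock around pop_instance_event.
@lockutils.synchronized("0552d616-a406-4dfa-8a70-82f39fb98bbc-events")
def _pop_event():
    pass  # pop a waiting external event, if any

_pop_event()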
[ 758.603596] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f745625-4ae4-4295-aa1a-bf98126457a6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.613529] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6262c59c-39fa-41e3-9d54-cb044fed7df4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.647401] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03c19bb1-c8f1-4f17-b192-2cc909dd58cf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.657094] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5fea7e8-3248-4e0c-8f13-e1affec06edc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.673406] env[68217]: DEBUG nova.compute.provider_tree [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.699584] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Releasing lock "refresh_cache-0552d616-a406-4dfa-8a70-82f39fb98bbc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 758.699584] env[68217]: DEBUG nova.compute.manager [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Instance network_info: |[{"id": "dd5b95b3-32c1-4279-b996-ecf817d6418d", "address": "fa:16:3e:b8:ff:00", "network": {"id": "ab4806dd-5eba-46c0-8f5f-2c2304fb3bfa", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1073338723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7f42746a8af4661ace1f67c4279c3b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd5b95b3-32", "ovs_interfaceid": "dd5b95b3-32c1-4279-b996-ecf817d6418d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 758.700067] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 
tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b8:ff:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2c7c1b46-cb81-45da-b5aa-7905d4da5854', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'dd5b95b3-32c1-4279-b996-ecf817d6418d', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 758.710022] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Creating folder: Project (c7f42746a8af4661ace1f67c4279c3b8). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 758.710022] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-60cefd5e-23df-41d9-8f96-ac083d6f1de0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.728262] env[68217]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 758.728262] env[68217]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68217) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 758.728616] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Folder already exists: Project (c7f42746a8af4661ace1f67c4279c3b8). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 758.729138] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Creating folder: Instances. Parent ref: group-v594147. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 758.729698] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8c3579ab-d5e5-436b-bcfc-ae897e73a900 tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Lock "d0d8ed27-003e-43e2-8a07-041420a2c758" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.658s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 758.730851] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff65d3af-a824-480e-9d37-3fd931ba0ec2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.745336] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Created folder: Instances in parent group-v594147. 
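The DuplicateName fault above is the benign path taken when the per-project folder already exists: CreateFolder is invoked, the fault is caught, and the build continues. A hedged sketch of that call via oslo.vmware, assuming an established VMwareAPISession `session`, a parent folder moref `parent_ref`, and that DuplicateName is exposed as an exception class (as this code path relies on):

from oslo_vmware import exceptions as vexc

def create_folder(session, parent_ref, name):
    # Folder.CreateFolder returns the new folder moref; DuplicateName means the
    # folder already exists, which the log above treats as a non-error.
    try:
        return session.invoke_api(session.vim, "CreateFolder", parent_ref, name=name)
    except vexc.DuplicateName:
        return None  # caller falls back to looking the existing folder up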
[ 758.745612] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 758.745938] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 758.746302] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a88f17fd-3029-4a43-893d-7569c20e6f9f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.771037] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 758.771037] env[68217]: value = "task-2961055" [ 758.771037] env[68217]: _type = "Task" [ 758.771037] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.781475] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961055, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.847543] env[68217]: DEBUG nova.network.neutron [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 758.886927] env[68217]: DEBUG nova.network.neutron [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Successfully created port: d74ea5d5-78b7-4327-aefe-3d63fd497956 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 758.942403] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961052, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.498998} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.942871] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 7ec30097-1151-4b0d-8226-e4d34ea7b3c9/7ec30097-1151-4b0d-8226-e4d34ea7b3c9.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 758.942871] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 758.943159] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-782060a9-7589-4165-9d26-7aebe7ea9476 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.954134] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 758.954134] env[68217]: value = "task-2961056" [ 758.954134] env[68217]: _type = "Task" [ 758.954134] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.969362] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961056, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.024399] env[68217]: DEBUG nova.network.neutron [-] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.071453] env[68217]: DEBUG nova.compute.manager [req-d0376d09-43cb-4d0b-9856-5ca44d47677d req-503c82dc-a763-4cdd-8877-1cd076a58658 service nova] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Received event network-changed-dd5b95b3-32c1-4279-b996-ecf817d6418d {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 759.071639] env[68217]: DEBUG nova.compute.manager [req-d0376d09-43cb-4d0b-9856-5ca44d47677d req-503c82dc-a763-4cdd-8877-1cd076a58658 service nova] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Refreshing instance network info cache due to event network-changed-dd5b95b3-32c1-4279-b996-ecf817d6418d. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 759.071845] env[68217]: DEBUG oslo_concurrency.lockutils [req-d0376d09-43cb-4d0b-9856-5ca44d47677d req-503c82dc-a763-4cdd-8877-1cd076a58658 service nova] Acquiring lock "refresh_cache-0552d616-a406-4dfa-8a70-82f39fb98bbc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 759.072876] env[68217]: DEBUG oslo_concurrency.lockutils [req-d0376d09-43cb-4d0b-9856-5ca44d47677d req-503c82dc-a763-4cdd-8877-1cd076a58658 service nova] Acquired lock "refresh_cache-0552d616-a406-4dfa-8a70-82f39fb98bbc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 759.072876] env[68217]: DEBUG nova.network.neutron [req-d0376d09-43cb-4d0b-9856-5ca44d47677d req-503c82dc-a763-4cdd-8877-1cd076a58658 service nova] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Refreshing network info cache for port dd5b95b3-32c1-4279-b996-ecf817d6418d {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 759.076925] env[68217]: DEBUG nova.compute.manager [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 759.105306] env[68217]: DEBUG nova.virt.hardware [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 759.105306] env[68217]: DEBUG nova.virt.hardware [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 759.105306] env[68217]: DEBUG nova.virt.hardware [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 759.105644] env[68217]: DEBUG nova.virt.hardware [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 759.105644] env[68217]: DEBUG nova.virt.hardware [None 
req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 759.105644] env[68217]: DEBUG nova.virt.hardware [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 759.105739] env[68217]: DEBUG nova.virt.hardware [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 759.105827] env[68217]: DEBUG nova.virt.hardware [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 759.105991] env[68217]: DEBUG nova.virt.hardware [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 759.107364] env[68217]: DEBUG nova.virt.hardware [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 759.107364] env[68217]: DEBUG nova.virt.hardware [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 759.108640] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f526e342-ca94-439c-9c08-9eba6125ff43 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.119332] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cecaccd0-1a2e-417a-b085-cff09c0e93a7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.179177] env[68217]: DEBUG nova.scheduler.client.report [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 759.211071] env[68217]: DEBUG nova.network.neutron [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Updating instance_info_cache with network_info: [{"id": "c4ffafa7-b375-4f41-90e8-0db42f248139", "address": "fa:16:3e:50:21:dc", "network": {"id": "b560d1ba-8945-443a-9586-083067363663", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-25035929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0522eaa6ebc48a28651f6b3bf1434f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4ffafa7-b3", "ovs_interfaceid": "c4ffafa7-b375-4f41-90e8-0db42f248139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.236523] env[68217]: DEBUG nova.compute.manager [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 759.282767] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961055, 'name': CreateVM_Task, 'duration_secs': 0.343204} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.283318] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 759.284853] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'attachment_id': '17767a32-267e-4d08-9eb8-20118f83f9f2', 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594156', 'volume_id': '03332631-865e-4746-a213-a86fd1f1f4ef', 'name': 'volume-03332631-865e-4746-a213-a86fd1f1f4ef', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0552d616-a406-4dfa-8a70-82f39fb98bbc', 'attached_at': '', 'detached_at': '', 'volume_id': '03332631-865e-4746-a213-a86fd1f1f4ef', 'serial': '03332631-865e-4746-a213-a86fd1f1f4ef'}, 'guest_format': None, 'delete_on_termination': True, 'boot_index': 0, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=68217) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 759.285144] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Root volume attach. Driver type: vmdk {{(pid=68217) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 759.286860] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ace04b7-5a3a-4da8-a030-ce15bc344757 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.298368] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f47ebd43-5568-4999-9fc1-03d9d17e0671 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.309636] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2518ebc8-4a76-4b1f-b7c5-abf37e20aa72 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.315098] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-b3092b84-cb2c-425d-882f-48b4ce3ead8d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.322572] env[68217]: DEBUG oslo_vmware.api [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Waiting for the task: (returnval){ [ 759.322572] env[68217]: value = "task-2961057" [ 759.322572] env[68217]: _type = "Task" [ 759.322572] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.330976] env[68217]: DEBUG oslo_vmware.api [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Task: {'id': task-2961057, 'name': RelocateVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.467227] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961056, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080181} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.467616] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 759.468701] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca442441-e14c-499d-9f3f-69317ebf85cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.496260] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Reconfiguring VM instance instance-0000002b to attach disk [datastore1] 7ec30097-1151-4b0d-8226-e4d34ea7b3c9/7ec30097-1151-4b0d-8226-e4d34ea7b3c9.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 759.496481] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ddc5e64-bf78-4a11-b5d9-74ce7e61bca8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.520407] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 759.520407] env[68217]: value = "task-2961058" [ 759.520407] env[68217]: _type = "Task" [ 759.520407] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.528633] env[68217]: INFO nova.compute.manager [-] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Took 2.03 seconds to deallocate network for instance. [ 759.535227] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961058, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.684898] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.650s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.685500] env[68217]: DEBUG nova.compute.manager [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 759.689479] env[68217]: DEBUG oslo_concurrency.lockutils [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 32.970s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.689798] env[68217]: DEBUG oslo_concurrency.lockutils [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 759.692837] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 29.426s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 759.694350] env[68217]: INFO nova.compute.claims [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 759.715062] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Releasing lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 759.715062] env[68217]: DEBUG nova.compute.manager [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Instance network_info: |[{"id": "c4ffafa7-b375-4f41-90e8-0db42f248139", "address": "fa:16:3e:50:21:dc", "network": {"id": "b560d1ba-8945-443a-9586-083067363663", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-25035929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0522eaa6ebc48a28651f6b3bf1434f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4ffafa7-b3", "ovs_interfaceid": "c4ffafa7-b375-4f41-90e8-0db42f248139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 759.715982] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:21:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c4ffafa7-b375-4f41-90e8-0db42f248139', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 759.726514] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Creating folder: Project (c0522eaa6ebc48a28651f6b3bf1434f3). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 759.727997] env[68217]: INFO nova.scheduler.client.report [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Deleted allocations for instance 11f9c054-62b9-4ac9-9651-5c85e7a86663 [ 759.729355] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e381873d-7cda-4f1e-8596-b1f3880a4a90 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.745360] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Created folder: Project (c0522eaa6ebc48a28651f6b3bf1434f3) in parent group-v594094. [ 759.746542] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Creating folder: Instances. Parent ref: group-v594230. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 759.749313] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0a91f9ab-f474-4412-958a-5ac73314e11f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.761893] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Created folder: Instances in parent group-v594230. [ 759.761893] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 759.762988] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 759.762988] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-567c2086-36f2-468a-bbfa-75bdab24946c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.782935] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 759.789601] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 759.789601] env[68217]: value = "task-2961061" [ 759.789601] env[68217]: _type = "Task" [ 759.789601] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.803337] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961061, 'name': CreateVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.836570] env[68217]: DEBUG oslo_vmware.api [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Task: {'id': task-2961057, 'name': RelocateVM_Task, 'duration_secs': 0.380017} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.837644] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Volume attach. 
Driver type: vmdk {{(pid=68217) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 759.837644] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594156', 'volume_id': '03332631-865e-4746-a213-a86fd1f1f4ef', 'name': 'volume-03332631-865e-4746-a213-a86fd1f1f4ef', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0552d616-a406-4dfa-8a70-82f39fb98bbc', 'attached_at': '', 'detached_at': '', 'volume_id': '03332631-865e-4746-a213-a86fd1f1f4ef', 'serial': '03332631-865e-4746-a213-a86fd1f1f4ef'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 759.838636] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4306099e-e57a-4640-a74c-b71ca37e10fc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.857264] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94ccb59-de78-4b0d-bd4d-f25e9cbb3dbf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.882751] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Reconfiguring VM instance instance-0000002c to attach disk [datastore1] volume-03332631-865e-4746-a213-a86fd1f1f4ef/volume-03332631-865e-4746-a213-a86fd1f1f4ef.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 759.885838] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c57f6d6-392b-40d8-81cf-4f892e2dea60 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.909691] env[68217]: DEBUG oslo_vmware.api [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Waiting for the task: (returnval){ [ 759.909691] env[68217]: value = "task-2961062" [ 759.909691] env[68217]: _type = "Task" [ 759.909691] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.919410] env[68217]: DEBUG oslo_vmware.api [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Task: {'id': task-2961062, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.035097] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961058, 'name': ReconfigVM_Task, 'duration_secs': 0.333638} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.035678] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Reconfigured VM instance instance-0000002b to attach disk [datastore1] 7ec30097-1151-4b0d-8226-e4d34ea7b3c9/7ec30097-1151-4b0d-8226-e4d34ea7b3c9.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 760.036425] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e0b7a7f9-257e-4969-a77f-48ae9a9ed3bf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.043867] env[68217]: DEBUG nova.network.neutron [req-d0376d09-43cb-4d0b-9856-5ca44d47677d req-503c82dc-a763-4cdd-8877-1cd076a58658 service nova] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Updated VIF entry in instance network info cache for port dd5b95b3-32c1-4279-b996-ecf817d6418d. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 760.044250] env[68217]: DEBUG nova.network.neutron [req-d0376d09-43cb-4d0b-9856-5ca44d47677d req-503c82dc-a763-4cdd-8877-1cd076a58658 service nova] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Updating instance_info_cache with network_info: [{"id": "dd5b95b3-32c1-4279-b996-ecf817d6418d", "address": "fa:16:3e:b8:ff:00", "network": {"id": "ab4806dd-5eba-46c0-8f5f-2c2304fb3bfa", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1073338723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7f42746a8af4661ace1f67c4279c3b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd5b95b3-32", "ovs_interfaceid": "dd5b95b3-32c1-4279-b996-ecf817d6418d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.046514] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.053016] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 760.053016] env[68217]: value = "task-2961063" [ 760.053016] env[68217]: _type = "Task" [ 
760.053016] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.075651] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961063, 'name': Rename_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.202750] env[68217]: DEBUG nova.compute.utils [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 760.209026] env[68217]: DEBUG nova.compute.manager [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 760.209026] env[68217]: DEBUG nova.network.neutron [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 760.239955] env[68217]: DEBUG oslo_concurrency.lockutils [None req-87d45752-2b29-453f-9374-a3278467ab95 tempest-ListServerFiltersTestJSON-472228476 tempest-ListServerFiltersTestJSON-472228476-project-member] Lock "11f9c054-62b9-4ac9-9651-5c85e7a86663" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 36.512s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 760.283019] env[68217]: DEBUG nova.policy [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b23052f548884b09bc58f4e0e6783591', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '36c9130982204910a139a45ddad542c3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 760.301146] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961061, 'name': CreateVM_Task, 'duration_secs': 0.351302} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.301301] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 760.302417] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.302587] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.303040] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 760.303224] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3113fea6-a560-4877-b770-29352378b25e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.309526] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 760.309526] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52db3405-3f99-f40f-45fb-009d78a85694" [ 760.309526] env[68217]: _type = "Task" [ 760.309526] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.319373] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52db3405-3f99-f40f-45fb-009d78a85694, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.424100] env[68217]: DEBUG oslo_vmware.api [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Task: {'id': task-2961062, 'name': ReconfigVM_Task, 'duration_secs': 0.329232} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.424453] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Reconfigured VM instance instance-0000002c to attach disk [datastore1] volume-03332631-865e-4746-a213-a86fd1f1f4ef/volume-03332631-865e-4746-a213-a86fd1f1f4ef.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 760.429314] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f8015317-bd40-428f-a6ad-6f8174fa9f2c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.445851] env[68217]: DEBUG oslo_vmware.api [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Waiting for the task: (returnval){ [ 760.445851] env[68217]: value = "task-2961064" [ 760.445851] env[68217]: _type = "Task" [ 760.445851] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.456513] env[68217]: DEBUG oslo_vmware.api [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Task: {'id': task-2961064, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.553466] env[68217]: DEBUG oslo_concurrency.lockutils [req-d0376d09-43cb-4d0b-9856-5ca44d47677d req-503c82dc-a763-4cdd-8877-1cd076a58658 service nova] Releasing lock "refresh_cache-0552d616-a406-4dfa-8a70-82f39fb98bbc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.553745] env[68217]: DEBUG nova.compute.manager [req-d0376d09-43cb-4d0b-9856-5ca44d47677d req-503c82dc-a763-4cdd-8877-1cd076a58658 service nova] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Received event network-vif-deleted-d0d0d745-839e-4300-96dc-96e3be561179 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 760.564776] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961063, 'name': Rename_Task, 'duration_secs': 0.188066} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.565321] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 760.565605] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e2abf984-ba24-4f71-ad48-63f43b1ae3ab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.572423] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 760.572423] env[68217]: value = "task-2961065" [ 760.572423] env[68217]: _type = "Task" [ 760.572423] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.582842] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961065, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.709016] env[68217]: DEBUG nova.compute.utils [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 760.778368] env[68217]: DEBUG oslo_concurrency.lockutils [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Acquiring lock "e642c93b-ca48-4d23-9abb-ff243855d8d0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 760.778555] env[68217]: DEBUG oslo_concurrency.lockutils [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Lock "e642c93b-ca48-4d23-9abb-ff243855d8d0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 760.829040] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52db3405-3f99-f40f-45fb-009d78a85694, 'name': SearchDatastore_Task, 'duration_secs': 0.010488} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.829040] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 760.829040] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 760.829040] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 760.829384] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 760.829384] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 760.829384] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d78406f1-a616-4392-b39d-1133c835eaa9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.836588] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 760.836790] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 760.838901] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66b67d18-c36c-4b31-b076-de0a28ae5677 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.845159] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 760.845159] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ff9d35-2a44-ec69-5888-43bd50af1616" [ 760.845159] env[68217]: _type = "Task" [ 760.845159] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.857071] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ff9d35-2a44-ec69-5888-43bd50af1616, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.963959] env[68217]: DEBUG oslo_vmware.api [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Task: {'id': task-2961064, 'name': ReconfigVM_Task, 'duration_secs': 0.130859} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.964588] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594156', 'volume_id': '03332631-865e-4746-a213-a86fd1f1f4ef', 'name': 'volume-03332631-865e-4746-a213-a86fd1f1f4ef', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0552d616-a406-4dfa-8a70-82f39fb98bbc', 'attached_at': '', 'detached_at': '', 'volume_id': '03332631-865e-4746-a213-a86fd1f1f4ef', 'serial': '03332631-865e-4746-a213-a86fd1f1f4ef'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 760.965623] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b3e42eaa-9152-4a47-ad03-04cbc0fd7918 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.972594] env[68217]: DEBUG oslo_vmware.api [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Waiting for the task: (returnval){ [ 760.972594] env[68217]: value = "task-2961066" [ 760.972594] env[68217]: _type = "Task" [ 760.972594] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 760.985133] env[68217]: DEBUG oslo_vmware.api [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Task: {'id': task-2961066, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.015096] env[68217]: DEBUG nova.network.neutron [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Successfully created port: 5417460d-31c2-4462-b8aa-192085fc884f {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 761.093127] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961065, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 761.196251] env[68217]: DEBUG nova.compute.manager [req-f5b8158b-8bed-4b63-a300-3f381a910fcf req-36f5da28-ea40-4d98-ad30-c66450533b63 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Received event network-changed-c4ffafa7-b375-4f41-90e8-0db42f248139 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 761.196431] env[68217]: DEBUG nova.compute.manager [req-f5b8158b-8bed-4b63-a300-3f381a910fcf req-36f5da28-ea40-4d98-ad30-c66450533b63 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Refreshing instance network info cache due to event network-changed-c4ffafa7-b375-4f41-90e8-0db42f248139. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 761.196637] env[68217]: DEBUG oslo_concurrency.lockutils [req-f5b8158b-8bed-4b63-a300-3f381a910fcf req-36f5da28-ea40-4d98-ad30-c66450533b63 service nova] Acquiring lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.196785] env[68217]: DEBUG oslo_concurrency.lockutils [req-f5b8158b-8bed-4b63-a300-3f381a910fcf req-36f5da28-ea40-4d98-ad30-c66450533b63 service nova] Acquired lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 761.197106] env[68217]: DEBUG nova.network.neutron [req-f5b8158b-8bed-4b63-a300-3f381a910fcf req-36f5da28-ea40-4d98-ad30-c66450533b63 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Refreshing network info cache for port c4ffafa7-b375-4f41-90e8-0db42f248139 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 761.985628] env[68217]: DEBUG nova.network.neutron [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Successfully updated port: d74ea5d5-78b7-4327-aefe-3d63fd497956 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 761.986879] env[68217]: DEBUG nova.compute.manager [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 761.992839] env[68217]: DEBUG nova.compute.manager [req-ae2b66f9-9657-4830-b8e7-c6a32c22c5e5 req-a55280e3-fc13-4cf7-a872-d500fabe5fb7 service nova] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Received event network-vif-plugged-d74ea5d5-78b7-4327-aefe-3d63fd497956 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 761.993044] env[68217]: DEBUG oslo_concurrency.lockutils [req-ae2b66f9-9657-4830-b8e7-c6a32c22c5e5 req-a55280e3-fc13-4cf7-a872-d500fabe5fb7 service nova] Acquiring lock "156ea1ad-6e52-4848-915d-7ba74c606e6e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 761.993242] env[68217]: DEBUG oslo_concurrency.lockutils [req-ae2b66f9-9657-4830-b8e7-c6a32c22c5e5 req-a55280e3-fc13-4cf7-a872-d500fabe5fb7 service nova] Lock "156ea1ad-6e52-4848-915d-7ba74c606e6e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 761.993418] env[68217]: DEBUG oslo_concurrency.lockutils [req-ae2b66f9-9657-4830-b8e7-c6a32c22c5e5 req-a55280e3-fc13-4cf7-a872-d500fabe5fb7 service nova] Lock "156ea1ad-6e52-4848-915d-7ba74c606e6e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 761.993581] env[68217]: DEBUG nova.compute.manager [req-ae2b66f9-9657-4830-b8e7-c6a32c22c5e5 req-a55280e3-fc13-4cf7-a872-d500fabe5fb7 service nova] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] No waiting events found dispatching network-vif-plugged-d74ea5d5-78b7-4327-aefe-3d63fd497956 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 761.993731] env[68217]: WARNING nova.compute.manager [req-ae2b66f9-9657-4830-b8e7-c6a32c22c5e5 req-a55280e3-fc13-4cf7-a872-d500fabe5fb7 service nova] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Received unexpected event network-vif-plugged-d74ea5d5-78b7-4327-aefe-3d63fd497956 for instance with vm_state building and task_state spawning. 
[ 762.005605] env[68217]: DEBUG oslo_concurrency.lockutils [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "a86015ea-fa6b-4cf8-9d79-273ffa02ec23" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 762.006156] env[68217]: DEBUG oslo_concurrency.lockutils [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "a86015ea-fa6b-4cf8-9d79-273ffa02ec23" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 762.016090] env[68217]: DEBUG oslo_vmware.api [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Task: {'id': task-2961066, 'name': Rename_Task, 'duration_secs': 0.13109} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.016339] env[68217]: DEBUG oslo_vmware.api [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961065, 'name': PowerOnVM_Task, 'duration_secs': 0.682039} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.019837] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 762.020184] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 762.020322] env[68217]: INFO nova.compute.manager [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Took 10.72 seconds to spawn the instance on the hypervisor. [ 762.023974] env[68217]: DEBUG nova.compute.manager [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 762.023974] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ff9d35-2a44-ec69-5888-43bd50af1616, 'name': SearchDatastore_Task, 'duration_secs': 0.012794} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.023974] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e43ba795-a962-4a30-9b73-201aa96c98c5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.025834] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02258a3d-16ba-43f2-a7b7-0ccc8c6ee694 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.030059] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2dc62e91-b253-45a6-89d7-349b009d0e83 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.036305] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 762.036305] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]526ccda8-145e-e75d-d40c-4776fb3a092e" [ 762.036305] env[68217]: _type = "Task" [ 762.036305] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.039338] env[68217]: DEBUG oslo_vmware.api [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Waiting for the task: (returnval){ [ 762.039338] env[68217]: value = "task-2961067" [ 762.039338] env[68217]: _type = "Task" [ 762.039338] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.054127] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526ccda8-145e-e75d-d40c-4776fb3a092e, 'name': SearchDatastore_Task, 'duration_secs': 0.012504} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.058091] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 762.058091] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] d14026b1-84dd-430e-be94-94dcb1f47473/d14026b1-84dd-430e-be94-94dcb1f47473.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 762.058091] env[68217]: DEBUG oslo_vmware.api [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Task: {'id': task-2961067, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.058091] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-748a9de0-985e-43d9-89e4-52ccd2696f4b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.068370] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 762.068370] env[68217]: value = "task-2961068" [ 762.068370] env[68217]: _type = "Task" [ 762.068370] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.078392] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961068, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.125237] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3efa4ff4-cf94-4461-be78-2da16e846087 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.133302] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-653d682f-8d1d-4535-accb-46edd3c71d11 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.169313] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c110353-e8fd-43b2-a142-2155670aa518 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.178090] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb002a8d-dc9b-4e7c-a2b0-8460d81bf887 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.193624] env[68217]: DEBUG nova.compute.provider_tree [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.502990] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "refresh_cache-156ea1ad-6e52-4848-915d-7ba74c606e6e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.503209] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquired lock "refresh_cache-156ea1ad-6e52-4848-915d-7ba74c606e6e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 762.503362] env[68217]: DEBUG nova.network.neutron [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 762.563962] env[68217]: INFO nova.compute.manager [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Took 63.89 seconds to build instance. [ 762.578170] env[68217]: DEBUG oslo_vmware.api [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Task: {'id': task-2961067, 'name': PowerOnVM_Task, 'duration_secs': 0.497223} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.581571] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 762.581804] env[68217]: INFO nova.compute.manager [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Took 6.83 seconds to spawn the instance on the hypervisor. [ 762.582682] env[68217]: DEBUG nova.compute.manager [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 762.584364] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e91cdfaf-2c60-4971-ac22-86239c6359ca {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.592629] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961068, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.696729] env[68217]: DEBUG nova.scheduler.client.report [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 763.007979] env[68217]: DEBUG nova.network.neutron [req-f5b8158b-8bed-4b63-a300-3f381a910fcf req-36f5da28-ea40-4d98-ad30-c66450533b63 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Updated VIF entry in instance network info cache for port c4ffafa7-b375-4f41-90e8-0db42f248139. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 763.009037] env[68217]: DEBUG nova.network.neutron [req-f5b8158b-8bed-4b63-a300-3f381a910fcf req-36f5da28-ea40-4d98-ad30-c66450533b63 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Updating instance_info_cache with network_info: [{"id": "c4ffafa7-b375-4f41-90e8-0db42f248139", "address": "fa:16:3e:50:21:dc", "network": {"id": "b560d1ba-8945-443a-9586-083067363663", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-25035929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0522eaa6ebc48a28651f6b3bf1434f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4ffafa7-b3", "ovs_interfaceid": "c4ffafa7-b375-4f41-90e8-0db42f248139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.011210] env[68217]: DEBUG nova.compute.manager [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 763.038783] env[68217]: DEBUG nova.virt.hardware [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:15:01Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=1,extra_specs={hw_rng:allowed='True'},flavorid='401513634',id=21,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_1-1318664198',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 763.042631] env[68217]: DEBUG nova.virt.hardware [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 763.042631] env[68217]: DEBUG nova.virt.hardware [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 763.042631] env[68217]: DEBUG nova.virt.hardware [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 763.042631] env[68217]: DEBUG nova.virt.hardware [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 763.042631] env[68217]: DEBUG nova.virt.hardware [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 763.042983] env[68217]: DEBUG nova.virt.hardware [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 763.042983] env[68217]: DEBUG nova.virt.hardware [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 
tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 763.042983] env[68217]: DEBUG nova.virt.hardware [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 763.042983] env[68217]: DEBUG nova.virt.hardware [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 763.042983] env[68217]: DEBUG nova.virt.hardware [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 763.043179] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9710b84-8512-4c72-87e8-cfa0cfda3410 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.050741] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d381ec4a-a0c4-48c2-b119-1f17109bb2d3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.068572] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3919b191-3a9c-48a3-8135-58e87a1b6bd6 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "7ec30097-1151-4b0d-8226-e4d34ea7b3c9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.435s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.069670] env[68217]: DEBUG nova.network.neutron [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 763.084627] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961068, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.573163} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.084896] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] d14026b1-84dd-430e-be94-94dcb1f47473/d14026b1-84dd-430e-be94-94dcb1f47473.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 763.085157] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 763.085444] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4270b1ab-92bf-41d0-88e1-ddfde3bd3f0c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.094958] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 763.094958] env[68217]: value = "task-2961069" [ 763.094958] env[68217]: _type = "Task" [ 763.094958] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.106021] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961069, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.117696] env[68217]: INFO nova.compute.manager [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Took 60.63 seconds to build instance. [ 763.201544] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.509s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.202080] env[68217]: DEBUG nova.compute.manager [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 763.204979] env[68217]: DEBUG oslo_concurrency.lockutils [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.902s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.206239] env[68217]: INFO nova.compute.claims [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 763.368852] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquiring lock "23366029-e754-49dc-ba56-7a0d92232d81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 763.369180] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "23366029-e754-49dc-ba56-7a0d92232d81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 763.406404] env[68217]: DEBUG nova.network.neutron [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Updating instance_info_cache with network_info: [{"id": "d74ea5d5-78b7-4327-aefe-3d63fd497956", "address": "fa:16:3e:d5:19:29", "network": {"id": "2456ed77-d69f-4430-b649-cebfb2e6e5c6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-259900287-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "023b801c234d47d79cb57ea73058e81c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd74ea5d5-78", "ovs_interfaceid": "d74ea5d5-78b7-4327-aefe-3d63fd497956", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.517436] env[68217]: DEBUG oslo_concurrency.lockutils [req-f5b8158b-8bed-4b63-a300-3f381a910fcf req-36f5da28-ea40-4d98-ad30-c66450533b63 service nova] Releasing lock 
"refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.534818] env[68217]: DEBUG nova.network.neutron [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Successfully updated port: 5417460d-31c2-4462-b8aa-192085fc884f {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 763.575700] env[68217]: DEBUG nova.compute.manager [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 763.587491] env[68217]: DEBUG nova.compute.manager [req-aadca7a4-a48e-4f3f-98e2-a41084e852a6 req-bf84cce0-57aa-4173-9f68-bc1dc196584b service nova] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Received event network-changed-bde1de37-ba7a-4f49-94b6-85acc11e39a6 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 763.587491] env[68217]: DEBUG nova.compute.manager [req-aadca7a4-a48e-4f3f-98e2-a41084e852a6 req-bf84cce0-57aa-4173-9f68-bc1dc196584b service nova] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Refreshing instance network info cache due to event network-changed-bde1de37-ba7a-4f49-94b6-85acc11e39a6. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 763.587491] env[68217]: DEBUG oslo_concurrency.lockutils [req-aadca7a4-a48e-4f3f-98e2-a41084e852a6 req-bf84cce0-57aa-4173-9f68-bc1dc196584b service nova] Acquiring lock "refresh_cache-d0d8ed27-003e-43e2-8a07-041420a2c758" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 763.587491] env[68217]: DEBUG oslo_concurrency.lockutils [req-aadca7a4-a48e-4f3f-98e2-a41084e852a6 req-bf84cce0-57aa-4173-9f68-bc1dc196584b service nova] Acquired lock "refresh_cache-d0d8ed27-003e-43e2-8a07-041420a2c758" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 763.587491] env[68217]: DEBUG nova.network.neutron [req-aadca7a4-a48e-4f3f-98e2-a41084e852a6 req-bf84cce0-57aa-4173-9f68-bc1dc196584b service nova] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Refreshing network info cache for port bde1de37-ba7a-4f49-94b6-85acc11e39a6 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 763.609320] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961069, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070762} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.609320] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 763.610541] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f23a5d6-217b-4d5f-8a1f-506700b0562d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.631151] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f88d002d-ed81-40c5-a530-63e0b6e5c149 tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Lock "0552d616-a406-4dfa-8a70-82f39fb98bbc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.824s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 763.641176] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] d14026b1-84dd-430e-be94-94dcb1f47473/d14026b1-84dd-430e-be94-94dcb1f47473.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 763.643483] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eee0669b-704f-4417-bc71-c06dbdff8b37 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.667663] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 763.667663] env[68217]: value = "task-2961070" [ 763.667663] env[68217]: _type = "Task" [ 763.667663] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.678247] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961070, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 763.714851] env[68217]: DEBUG nova.compute.utils [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 763.718626] env[68217]: DEBUG nova.compute.manager [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 763.718824] env[68217]: DEBUG nova.network.neutron [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 763.861090] env[68217]: DEBUG nova.policy [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b76681b9ef1446dda7a508c8ade75e69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '904fd1b1eb9d4ab8bd1ea9967249bc29', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 763.909251] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Releasing lock "refresh_cache-156ea1ad-6e52-4848-915d-7ba74c606e6e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 763.909589] env[68217]: DEBUG nova.compute.manager [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Instance network_info: |[{"id": "d74ea5d5-78b7-4327-aefe-3d63fd497956", "address": "fa:16:3e:d5:19:29", "network": {"id": "2456ed77-d69f-4430-b649-cebfb2e6e5c6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-259900287-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "023b801c234d47d79cb57ea73058e81c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd74ea5d5-78", "ovs_interfaceid": "d74ea5d5-78b7-4327-aefe-3d63fd497956", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 763.910020] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:19:29', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e0c77754-4085-434b-a3e8-d61be099ac67', 'network-type': 
'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd74ea5d5-78b7-4327-aefe-3d63fd497956', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 763.923257] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 763.923584] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 763.923831] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-be6113eb-0235-49e5-bf38-1e3ad6095d0c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.949384] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 763.949384] env[68217]: value = "task-2961071" [ 763.949384] env[68217]: _type = "Task" [ 763.949384] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 763.957237] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961071, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.038376] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquiring lock "refresh_cache-7584180b-efa6-4038-9f3a-619ab7937553" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.038966] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquired lock "refresh_cache-7584180b-efa6-4038-9f3a-619ab7937553" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.039333] env[68217]: DEBUG nova.network.neutron [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 764.108653] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.163028] env[68217]: DEBUG nova.compute.manager [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 764.168346] env[68217]: DEBUG oslo_concurrency.lockutils [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquiring lock "b7fe971e-353f-427c-896c-32f9de0d70e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.168968] env[68217]: DEBUG oslo_concurrency.lockutils [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "b7fe971e-353f-427c-896c-32f9de0d70e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 764.182298] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961070, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.219385] env[68217]: DEBUG nova.compute.manager [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 764.363045] env[68217]: DEBUG nova.compute.manager [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Received event network-changed-d74ea5d5-78b7-4327-aefe-3d63fd497956 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 764.363269] env[68217]: DEBUG nova.compute.manager [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Refreshing instance network info cache due to event network-changed-d74ea5d5-78b7-4327-aefe-3d63fd497956. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 764.363486] env[68217]: DEBUG oslo_concurrency.lockutils [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] Acquiring lock "refresh_cache-156ea1ad-6e52-4848-915d-7ba74c606e6e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.363653] env[68217]: DEBUG oslo_concurrency.lockutils [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] Acquired lock "refresh_cache-156ea1ad-6e52-4848-915d-7ba74c606e6e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.363823] env[68217]: DEBUG nova.network.neutron [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Refreshing network info cache for port d74ea5d5-78b7-4327-aefe-3d63fd497956 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 764.461553] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961071, 'name': CreateVM_Task, 'duration_secs': 0.47954} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.465081] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 764.466210] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.466710] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.467245] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 764.467630] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32e09a0d-d92e-496c-acc5-1ab49c2fef96 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.476029] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 764.476029] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]528ab16e-e93a-177b-af84-82fb6089096f" [ 764.476029] env[68217]: _type = "Task" [ 764.476029] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.488021] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528ab16e-e93a-177b-af84-82fb6089096f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.685617] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961070, 'name': ReconfigVM_Task, 'duration_secs': 0.547113} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.689489] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Reconfigured VM instance instance-0000002d to attach disk [datastore1] d14026b1-84dd-430e-be94-94dcb1f47473/d14026b1-84dd-430e-be94-94dcb1f47473.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 764.690517] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a5890b34-869a-45f8-9449-17d120dd859d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.693242] env[68217]: DEBUG oslo_concurrency.lockutils [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 764.697851] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 764.697851] env[68217]: value = "task-2961072" [ 764.697851] env[68217]: _type = "Task" [ 764.697851] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.709450] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961072, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.726464] env[68217]: DEBUG nova.network.neutron [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 764.737509] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fd6f836-ade7-4fe3-9fd7-3a0bae2d1cd6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.745861] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2d65e4-2287-4d7e-ac89-8dd707285305 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.781187] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba7f49a8-841a-44eb-81e8-48ca3d39b678 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.790113] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78acafff-00cc-49b5-89db-08eb6c5ece0d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.804201] env[68217]: DEBUG nova.compute.provider_tree [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.983901] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528ab16e-e93a-177b-af84-82fb6089096f, 'name': SearchDatastore_Task, 'duration_secs': 0.036262} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 764.984217] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 764.984473] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 764.984678] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 764.984820] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 764.984990] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 764.985356] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-68dae561-5b01-40e0-a08e-88dcb1207007 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.994024] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 764.994205] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 764.994966] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d056803e-371b-46b3-9e8d-0b241d882ee9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.999810] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 764.999810] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52791ee8-69be-5f81-3f5d-3a9745850826" [ 764.999810] env[68217]: _type = "Task" [ 764.999810] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.008988] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52791ee8-69be-5f81-3f5d-3a9745850826, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.209872] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961072, 'name': Rename_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.235507] env[68217]: DEBUG nova.compute.manager [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 765.266794] env[68217]: DEBUG nova.virt.hardware [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 765.267081] env[68217]: DEBUG nova.virt.hardware [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 765.267244] env[68217]: DEBUG nova.virt.hardware [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 765.267428] env[68217]: DEBUG nova.virt.hardware [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 765.267578] env[68217]: DEBUG nova.virt.hardware [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 765.267718] env[68217]: DEBUG nova.virt.hardware [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 765.268040] env[68217]: DEBUG nova.virt.hardware [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 765.268195] env[68217]: DEBUG nova.virt.hardware [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 765.268422] env[68217]: DEBUG nova.virt.hardware [None 
req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 765.268636] env[68217]: DEBUG nova.virt.hardware [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 765.268855] env[68217]: DEBUG nova.virt.hardware [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 765.269816] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e46dc8f-9c27-4605-8170-4e514743060b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.279113] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4b8e090-50a5-48fe-be29-8ad40389c06f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.310528] env[68217]: DEBUG nova.scheduler.client.report [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 765.511490] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52791ee8-69be-5f81-3f5d-3a9745850826, 'name': SearchDatastore_Task, 'duration_secs': 0.012214} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.512308] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1089f9c-51d4-4cde-999a-a21f0134398e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.517519] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 765.517519] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]521037ed-48e1-cc99-263d-1b74a15e5495" [ 765.517519] env[68217]: _type = "Task" [ 765.517519] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.526197] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521037ed-48e1-cc99-263d-1b74a15e5495, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.628632] env[68217]: DEBUG nova.network.neutron [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Updating instance_info_cache with network_info: [{"id": "5417460d-31c2-4462-b8aa-192085fc884f", "address": "fa:16:3e:55:a8:37", "network": {"id": "1bc7f0b6-9537-490a-8acf-d4b9bee78802", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1016751512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c9130982204910a139a45ddad542c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "42f08482-a1da-405d-9918-d733d9f5173c", "external-id": "nsx-vlan-transportzone-381", "segmentation_id": 381, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5417460d-31", "ovs_interfaceid": "5417460d-31c2-4462-b8aa-192085fc884f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.662302] env[68217]: DEBUG nova.network.neutron [req-aadca7a4-a48e-4f3f-98e2-a41084e852a6 req-bf84cce0-57aa-4173-9f68-bc1dc196584b service nova] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Updated VIF entry in instance network info cache for port bde1de37-ba7a-4f49-94b6-85acc11e39a6. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 765.662660] env[68217]: DEBUG nova.network.neutron [req-aadca7a4-a48e-4f3f-98e2-a41084e852a6 req-bf84cce0-57aa-4173-9f68-bc1dc196584b service nova] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Updating instance_info_cache with network_info: [{"id": "bde1de37-ba7a-4f49-94b6-85acc11e39a6", "address": "fa:16:3e:47:42:b4", "network": {"id": "cedd6389-aede-436b-a684-49896e5db27a", "bridge": "br-int", "label": "tempest-ServersTestJSON-66859414-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.148", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3fcea47a290440bcb11f3f962f8e6de5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93c5b7ce-4c84-40bc-884c-b2453e0eee69", "external-id": "nsx-vlan-transportzone-882", "segmentation_id": 882, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbde1de37-ba", "ovs_interfaceid": "bde1de37-ba7a-4f49-94b6-85acc11e39a6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 765.729082] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961072, 'name': Rename_Task, 'duration_secs': 0.910098} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 765.729960] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 765.730347] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bf026aef-8ee0-40aa-ad63-0ec9b28b554b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.738931] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 765.738931] env[68217]: value = "task-2961073" [ 765.738931] env[68217]: _type = "Task" [ 765.738931] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 765.750891] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961073, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.821133] env[68217]: DEBUG oslo_concurrency.lockutils [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.614s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 765.821133] env[68217]: DEBUG nova.compute.manager [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 765.822788] env[68217]: DEBUG oslo_concurrency.lockutils [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 25.875s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 765.822788] env[68217]: DEBUG nova.objects.instance [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68217) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 766.028904] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521037ed-48e1-cc99-263d-1b74a15e5495, 'name': SearchDatastore_Task, 'duration_secs': 0.011915} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.028904] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.028904] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 156ea1ad-6e52-4848-915d-7ba74c606e6e/156ea1ad-6e52-4848-915d-7ba74c606e6e.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 766.028904] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-af9802c5-475b-40aa-92e4-7a0671ba0600 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.035283] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 766.035283] env[68217]: value = "task-2961074" [ 766.035283] env[68217]: _type = "Task" [ 766.035283] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.042719] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961074, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.131158] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Releasing lock "refresh_cache-7584180b-efa6-4038-9f3a-619ab7937553" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.131549] env[68217]: DEBUG nova.compute.manager [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Instance network_info: |[{"id": "5417460d-31c2-4462-b8aa-192085fc884f", "address": "fa:16:3e:55:a8:37", "network": {"id": "1bc7f0b6-9537-490a-8acf-d4b9bee78802", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1016751512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c9130982204910a139a45ddad542c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "42f08482-a1da-405d-9918-d733d9f5173c", "external-id": "nsx-vlan-transportzone-381", "segmentation_id": 381, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5417460d-31", "ovs_interfaceid": "5417460d-31c2-4462-b8aa-192085fc884f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 766.131992] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:55:a8:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '42f08482-a1da-405d-9918-d733d9f5173c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5417460d-31c2-4462-b8aa-192085fc884f', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 766.141015] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 766.141015] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 766.141015] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-82ee04ca-5dfd-42bb-82c9-80886c2f4cb4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.163933] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 766.163933] env[68217]: value = "task-2961075" [ 766.163933] env[68217]: _type = "Task" [ 766.163933] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.167327] env[68217]: DEBUG oslo_concurrency.lockutils [req-aadca7a4-a48e-4f3f-98e2-a41084e852a6 req-bf84cce0-57aa-4173-9f68-bc1dc196584b service nova] Releasing lock "refresh_cache-d0d8ed27-003e-43e2-8a07-041420a2c758" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.172321] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961075, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.211959] env[68217]: DEBUG nova.network.neutron [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Successfully created port: 86568bc3-8f1e-4880-9a22-48003fc7babd {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 766.249122] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961073, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.326900] env[68217]: DEBUG nova.compute.utils [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 766.332329] env[68217]: DEBUG nova.compute.manager [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 766.336029] env[68217]: DEBUG nova.network.neutron [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 766.339878] env[68217]: DEBUG nova.network.neutron [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Updated VIF entry in instance network info cache for port d74ea5d5-78b7-4327-aefe-3d63fd497956. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 766.340370] env[68217]: DEBUG nova.network.neutron [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Updating instance_info_cache with network_info: [{"id": "d74ea5d5-78b7-4327-aefe-3d63fd497956", "address": "fa:16:3e:d5:19:29", "network": {"id": "2456ed77-d69f-4430-b649-cebfb2e6e5c6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-259900287-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "023b801c234d47d79cb57ea73058e81c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd74ea5d5-78", "ovs_interfaceid": "d74ea5d5-78b7-4327-aefe-3d63fd497956", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.512098] env[68217]: DEBUG nova.policy [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '98519c6a9a164db39df83142383e97aa', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b99296b92df248d684d9e224d27bdcbc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 766.547972] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961074, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.676856] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961075, 'name': CreateVM_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.749390] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961073, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.833338] env[68217]: DEBUG nova.compute.manager [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 766.837090] env[68217]: DEBUG oslo_concurrency.lockutils [None req-90acfe79-d6a5-4f72-9434-7a5b9e717da1 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.838337] env[68217]: DEBUG oslo_concurrency.lockutils [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.868s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.839390] env[68217]: DEBUG nova.objects.instance [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lazy-loading 'resources' on Instance uuid 9d2b3670-ef8a-477a-b876-7a8fe37fa065 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 766.843036] env[68217]: DEBUG oslo_concurrency.lockutils [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] Releasing lock "refresh_cache-156ea1ad-6e52-4848-915d-7ba74c606e6e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 766.844293] env[68217]: DEBUG nova.compute.manager [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Received event network-vif-plugged-5417460d-31c2-4462-b8aa-192085fc884f {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 766.844293] env[68217]: DEBUG oslo_concurrency.lockutils [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] Acquiring lock "7584180b-efa6-4038-9f3a-619ab7937553-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 766.844293] env[68217]: DEBUG oslo_concurrency.lockutils [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] Lock "7584180b-efa6-4038-9f3a-619ab7937553-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 766.844293] env[68217]: DEBUG oslo_concurrency.lockutils [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] Lock "7584180b-efa6-4038-9f3a-619ab7937553-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 766.844293] env[68217]: DEBUG nova.compute.manager [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] No waiting events found dispatching network-vif-plugged-5417460d-31c2-4462-b8aa-192085fc884f {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 766.844552] env[68217]: WARNING nova.compute.manager [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Received unexpected event network-vif-plugged-5417460d-31c2-4462-b8aa-192085fc884f for instance with vm_state building and task_state spawning. [ 766.844552] env[68217]: DEBUG nova.compute.manager [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Received event network-changed-5417460d-31c2-4462-b8aa-192085fc884f {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 766.844656] env[68217]: DEBUG nova.compute.manager [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Refreshing instance network info cache due to event network-changed-5417460d-31c2-4462-b8aa-192085fc884f. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 766.844847] env[68217]: DEBUG oslo_concurrency.lockutils [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] Acquiring lock "refresh_cache-7584180b-efa6-4038-9f3a-619ab7937553" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.844979] env[68217]: DEBUG oslo_concurrency.lockutils [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] Acquired lock "refresh_cache-7584180b-efa6-4038-9f3a-619ab7937553" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 766.845165] env[68217]: DEBUG nova.network.neutron [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Refreshing network info cache for port 5417460d-31c2-4462-b8aa-192085fc884f {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 767.046641] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961074, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.640905} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.047103] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 156ea1ad-6e52-4848-915d-7ba74c606e6e/156ea1ad-6e52-4848-915d-7ba74c606e6e.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 767.047233] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 767.047465] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-47c5c10d-559e-4ec1-a255-5db50644bc64 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.053654] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 767.053654] env[68217]: value = "task-2961076" [ 767.053654] env[68217]: _type = "Task" [ 767.053654] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.062861] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961076, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.178063] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961075, 'name': CreateVM_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.250389] env[68217]: DEBUG oslo_vmware.api [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961073, 'name': PowerOnVM_Task, 'duration_secs': 1.170623} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.250685] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 767.250871] env[68217]: INFO nova.compute.manager [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Took 10.81 seconds to spawn the instance on the hypervisor. 
[ 767.251020] env[68217]: DEBUG nova.compute.manager [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 767.252450] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be5e8c51-9668-42cc-9fb8-713c93ae84cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.571063] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961076, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.074994} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.571419] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 767.572367] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0d522c-f8c6-4e6f-80df-6affe459bac8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.599974] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Reconfiguring VM instance instance-0000002e to attach disk [datastore1] 156ea1ad-6e52-4848-915d-7ba74c606e6e/156ea1ad-6e52-4848-915d-7ba74c606e6e.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 767.602962] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-72fc3fb9-9e61-41e9-b3b2-bbf01b8c19a8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.628544] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 767.628544] env[68217]: value = "task-2961077" [ 767.628544] env[68217]: _type = "Task" [ 767.628544] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.638697] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961077, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.678700] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961075, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.714315] env[68217]: DEBUG nova.network.neutron [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Successfully created port: fd7f0c32-5631-4c5d-9e7a-12a133f76232 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 767.774287] env[68217]: INFO nova.compute.manager [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Took 63.74 seconds to build instance. [ 767.850374] env[68217]: DEBUG nova.compute.manager [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 767.896693] env[68217]: DEBUG nova.virt.hardware [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 767.896921] env[68217]: DEBUG nova.virt.hardware [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 767.897086] env[68217]: DEBUG nova.virt.hardware [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 767.897278] env[68217]: DEBUG nova.virt.hardware [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 767.897422] env[68217]: DEBUG nova.virt.hardware [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 767.897565] env[68217]: DEBUG nova.virt.hardware [None 
req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 767.897773] env[68217]: DEBUG nova.virt.hardware [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 767.897932] env[68217]: DEBUG nova.virt.hardware [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 767.899291] env[68217]: DEBUG nova.virt.hardware [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 767.899516] env[68217]: DEBUG nova.virt.hardware [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 767.902028] env[68217]: DEBUG nova.virt.hardware [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 767.903499] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe02e44c-5101-443e-bc3a-219b062a128f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.907801] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f573182-4757-40e7-bd52-98465b2dbe8b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.917729] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f3e82d5-58e9-411e-80f8-f61ce37f1005 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.922566] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baeb54f7-6554-46f8-b0aa-c041389c4785 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.960984] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c02b5a93-336c-471c-8bbf-b20ba46fd9af {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.972318] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-63c9bcd9-7b36-419d-85bb-628b36ce5dff {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.990440] env[68217]: DEBUG nova.compute.provider_tree [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 768.063384] env[68217]: DEBUG nova.network.neutron [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Updated VIF entry in instance network info cache for port 5417460d-31c2-4462-b8aa-192085fc884f. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 768.063978] env[68217]: DEBUG nova.network.neutron [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Updating instance_info_cache with network_info: [{"id": "5417460d-31c2-4462-b8aa-192085fc884f", "address": "fa:16:3e:55:a8:37", "network": {"id": "1bc7f0b6-9537-490a-8acf-d4b9bee78802", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1016751512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c9130982204910a139a45ddad542c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "42f08482-a1da-405d-9918-d733d9f5173c", "external-id": "nsx-vlan-transportzone-381", "segmentation_id": 381, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5417460d-31", "ovs_interfaceid": "5417460d-31c2-4462-b8aa-192085fc884f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.140033] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961077, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.175875] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961075, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.276408] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1482f038-7a47-48fc-9c9c-a862b8c61570 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "d14026b1-84dd-430e-be94-94dcb1f47473" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 110.748s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 768.297186] env[68217]: DEBUG nova.compute.manager [req-f473caf3-886f-4070-944c-39d1ae7d6376 req-761a89c1-e413-4b9a-81bf-032db2c6b15e service nova] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Received event network-changed-dd5b95b3-32c1-4279-b996-ecf817d6418d {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 768.297186] env[68217]: DEBUG nova.compute.manager [req-f473caf3-886f-4070-944c-39d1ae7d6376 req-761a89c1-e413-4b9a-81bf-032db2c6b15e service nova] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Refreshing instance network info cache due to event network-changed-dd5b95b3-32c1-4279-b996-ecf817d6418d. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 768.297186] env[68217]: DEBUG oslo_concurrency.lockutils [req-f473caf3-886f-4070-944c-39d1ae7d6376 req-761a89c1-e413-4b9a-81bf-032db2c6b15e service nova] Acquiring lock "refresh_cache-0552d616-a406-4dfa-8a70-82f39fb98bbc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 768.297877] env[68217]: DEBUG oslo_concurrency.lockutils [req-f473caf3-886f-4070-944c-39d1ae7d6376 req-761a89c1-e413-4b9a-81bf-032db2c6b15e service nova] Acquired lock "refresh_cache-0552d616-a406-4dfa-8a70-82f39fb98bbc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 768.298279] env[68217]: DEBUG nova.network.neutron [req-f473caf3-886f-4070-944c-39d1ae7d6376 req-761a89c1-e413-4b9a-81bf-032db2c6b15e service nova] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Refreshing network info cache for port dd5b95b3-32c1-4279-b996-ecf817d6418d {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 768.517226] env[68217]: ERROR nova.scheduler.client.report [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [req-bcaf9094-c047-4afa-bb25-e34c89499ac9] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-bcaf9094-c047-4afa-bb25-e34c89499ac9"}]} [ 768.539087] env[68217]: DEBUG nova.scheduler.client.report [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 768.561336] env[68217]: DEBUG nova.scheduler.client.report [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 768.561820] env[68217]: DEBUG nova.compute.provider_tree [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 768.569065] env[68217]: DEBUG oslo_concurrency.lockutils [req-f15ec259-6eae-44fe-94e0-fd5be3324f10 req-4b948e81-9e33-4b1c-821a-752d854c6a1b service nova] Releasing lock "refresh_cache-7584180b-efa6-4038-9f3a-619ab7937553" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 768.579052] env[68217]: DEBUG nova.scheduler.client.report [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 768.604257] env[68217]: DEBUG nova.scheduler.client.report [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 768.645174] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 
tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961077, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.679413] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961075, 'name': CreateVM_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.781669] env[68217]: DEBUG nova.compute.manager [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 769.085815] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192b5215-2775-4ca5-882d-13eec44da2cf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.095486] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ba2871-228d-4f69-8387-f9ae4f322ef6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.135135] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b531faa1-9c77-4899-9bab-1958d45b94cc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.146607] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961077, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.150155] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8442afe8-8de5-4e08-ba1e-162940dca06d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 769.165574] env[68217]: DEBUG nova.compute.provider_tree [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 769.178206] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961075, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.309905] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 769.404191] env[68217]: DEBUG nova.network.neutron [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Successfully updated port: 86568bc3-8f1e-4880-9a22-48003fc7babd {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 769.640763] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961077, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.681046] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961075, 'name': CreateVM_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.696411] env[68217]: DEBUG nova.scheduler.client.report [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 73 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 769.696678] env[68217]: DEBUG nova.compute.provider_tree [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 73 to 74 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 769.696866] env[68217]: DEBUG nova.compute.provider_tree [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 769.725190] env[68217]: DEBUG nova.network.neutron [req-f473caf3-886f-4070-944c-39d1ae7d6376 req-761a89c1-e413-4b9a-81bf-032db2c6b15e service nova] [instance: 
0552d616-a406-4dfa-8a70-82f39fb98bbc] Updated VIF entry in instance network info cache for port dd5b95b3-32c1-4279-b996-ecf817d6418d. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 769.725578] env[68217]: DEBUG nova.network.neutron [req-f473caf3-886f-4070-944c-39d1ae7d6376 req-761a89c1-e413-4b9a-81bf-032db2c6b15e service nova] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Updating instance_info_cache with network_info: [{"id": "dd5b95b3-32c1-4279-b996-ecf817d6418d", "address": "fa:16:3e:b8:ff:00", "network": {"id": "ab4806dd-5eba-46c0-8f5f-2c2304fb3bfa", "bridge": "br-int", "label": "tempest-ServersTestBootFromVolume-1073338723-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7f42746a8af4661ace1f67c4279c3b8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2c7c1b46-cb81-45da-b5aa-7905d4da5854", "external-id": "nsx-vlan-transportzone-15", "segmentation_id": 15, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdd5b95b3-32", "ovs_interfaceid": "dd5b95b3-32c1-4279-b996-ecf817d6418d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 769.907232] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "refresh_cache-95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.907390] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquired lock "refresh_cache-95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 769.907544] env[68217]: DEBUG nova.network.neutron [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 770.058814] env[68217]: DEBUG nova.network.neutron [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Successfully updated port: fd7f0c32-5631-4c5d-9e7a-12a133f76232 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 770.147021] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961077, 
'name': ReconfigVM_Task, 'duration_secs': 2.134501} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.147021] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Reconfigured VM instance instance-0000002e to attach disk [datastore1] 156ea1ad-6e52-4848-915d-7ba74c606e6e/156ea1ad-6e52-4848-915d-7ba74c606e6e.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 770.147021] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a5244350-fa0c-4982-8571-41a61b8f60ed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.153838] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 770.153838] env[68217]: value = "task-2961081" [ 770.153838] env[68217]: _type = "Task" [ 770.153838] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.165022] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961081, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.182118] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961075, 'name': CreateVM_Task, 'duration_secs': 3.687127} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.182319] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 770.183044] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.183229] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.183541] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 770.183806] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6ce1bcd0-7ae6-437f-882b-674fd44785f7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.189297] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 770.189297] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529a7804-46ab-b4ec-a4cb-0512c8780d2e" [ 770.189297] env[68217]: _type = "Task" [ 770.189297] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.197563] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529a7804-46ab-b4ec-a4cb-0512c8780d2e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.202704] env[68217]: DEBUG oslo_concurrency.lockutils [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.364s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.205076] env[68217]: DEBUG oslo_concurrency.lockutils [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 27.412s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.205381] env[68217]: DEBUG nova.objects.instance [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Lazy-loading 'resources' on Instance uuid af11d05f-4432-4505-bb52-226414488960 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 770.211257] env[68217]: DEBUG oslo_concurrency.lockutils [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Acquiring lock "2e3dae16-dba3-4230-913d-7a5c3469e36e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 770.215022] env[68217]: DEBUG oslo_concurrency.lockutils [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Lock "2e3dae16-dba3-4230-913d-7a5c3469e36e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 770.226760] env[68217]: INFO nova.scheduler.client.report [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Deleted allocations for instance 9d2b3670-ef8a-477a-b876-7a8fe37fa065 [ 770.228352] env[68217]: DEBUG oslo_concurrency.lockutils [req-f473caf3-886f-4070-944c-39d1ae7d6376 req-761a89c1-e413-4b9a-81bf-032db2c6b15e service nova] Releasing lock "refresh_cache-0552d616-a406-4dfa-8a70-82f39fb98bbc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.475481] env[68217]: DEBUG nova.network.neutron [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 770.561113] env[68217]: DEBUG oslo_concurrency.lockutils [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquiring lock "refresh_cache-693d6a74-a671-4d02-8798-cd3975507428" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.561313] env[68217]: DEBUG oslo_concurrency.lockutils [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquired lock "refresh_cache-693d6a74-a671-4d02-8798-cd3975507428" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.561426] env[68217]: DEBUG nova.network.neutron [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 770.664349] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961081, 'name': Rename_Task, 'duration_secs': 0.18823} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.664609] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 770.664837] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-06c32478-5182-4f12-a162-f071dd7b92ce {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.671630] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 770.671630] env[68217]: value = "task-2961082" [ 770.671630] env[68217]: _type = "Task" [ 770.671630] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.679566] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961082, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.699624] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529a7804-46ab-b4ec-a4cb-0512c8780d2e, 'name': SearchDatastore_Task, 'duration_secs': 0.012108} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.699895] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 770.700151] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 770.700378] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 770.700566] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 770.700698] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 770.700953] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-63209254-4434-49e4-bcdd-816709150355 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.711858] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 770.712066] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 770.712797] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21ebc3ca-5e92-4759-913c-1a5ac716837b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.718323] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 770.718323] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f034cb-949c-158e-f98e-7c3b3205efd6" [ 770.718323] env[68217]: _type = "Task" [ 770.718323] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.726244] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f034cb-949c-158e-f98e-7c3b3205efd6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.736322] env[68217]: DEBUG oslo_concurrency.lockutils [None req-53d3e3d9-e7c0-4800-b485-2158031a7fed tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "9d2b3670-ef8a-477a-b876-7a8fe37fa065" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.756s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 770.740014] env[68217]: DEBUG nova.network.neutron [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Updating instance_info_cache with network_info: [{"id": "86568bc3-8f1e-4880-9a22-48003fc7babd", "address": "fa:16:3e:92:6c:cf", "network": {"id": "0e4ebc2e-6e2d-4414-a560-9db08d15dabf", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1709597117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "904fd1b1eb9d4ab8bd1ea9967249bc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86568bc3-8f", "ovs_interfaceid": "86568bc3-8f1e-4880-9a22-48003fc7babd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.108957] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2f1c5aae-bdf6-495b-a880-87ecd767a1aa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.119177] env[68217]: DEBUG nova.network.neutron [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 771.121795] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de77b6d-ccf1-485c-a2b6-c37d7d051ac1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.156704] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a525c97-610a-4373-904c-ea7bb9cdcd1d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.165387] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d8ec4e5-0d69-43d0-a05b-49dc4b7548f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.179302] env[68217]: DEBUG nova.compute.provider_tree [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 771.188085] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961082, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.228934] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f034cb-949c-158e-f98e-7c3b3205efd6, 'name': SearchDatastore_Task, 'duration_secs': 0.026978} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.229739] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39d7fcb3-05ff-4087-b369-4ffc73bf82e3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.235119] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 771.235119] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d65b42-19e5-9b69-fe77-b10c92e66b7e" [ 771.235119] env[68217]: _type = "Task" [ 771.235119] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.243160] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d65b42-19e5-9b69-fe77-b10c92e66b7e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.246041] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Releasing lock "refresh_cache-95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.246353] env[68217]: DEBUG nova.compute.manager [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Instance network_info: |[{"id": "86568bc3-8f1e-4880-9a22-48003fc7babd", "address": "fa:16:3e:92:6c:cf", "network": {"id": "0e4ebc2e-6e2d-4414-a560-9db08d15dabf", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1709597117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "904fd1b1eb9d4ab8bd1ea9967249bc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86568bc3-8f", "ovs_interfaceid": "86568bc3-8f1e-4880-9a22-48003fc7babd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 771.246714] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:92:6c:cf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '86568bc3-8f1e-4880-9a22-48003fc7babd', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 771.253914] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Creating folder: Project (904fd1b1eb9d4ab8bd1ea9967249bc29). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 771.254485] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-02b8534f-5d3e-48bf-bd30-f2f2afad38b2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.264107] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Created folder: Project (904fd1b1eb9d4ab8bd1ea9967249bc29) in parent group-v594094. [ 771.264346] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Creating folder: Instances. Parent ref: group-v594238. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 771.264589] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-193e426f-7a85-4518-9d37-fd4934ee798c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.273117] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Created folder: Instances in parent group-v594238. [ 771.273344] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 771.273540] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 771.273737] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-75bbc98a-bedb-4c06-8275-b578850ed34d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.291958] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 771.291958] env[68217]: value = "task-2961085" [ 771.291958] env[68217]: _type = "Task" [ 771.291958] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.301906] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961085, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.335764] env[68217]: DEBUG nova.compute.manager [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Received event network-changed-683c092b-4729-4946-9f3a-b14200be8d7c {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 771.335948] env[68217]: DEBUG nova.compute.manager [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Refreshing instance network info cache due to event network-changed-683c092b-4729-4946-9f3a-b14200be8d7c. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 771.336181] env[68217]: DEBUG oslo_concurrency.lockutils [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] Acquiring lock "refresh_cache-dfeeed37-8c84-4ecc-87ea-f4239f512fb1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.336315] env[68217]: DEBUG oslo_concurrency.lockutils [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] Acquired lock "refresh_cache-dfeeed37-8c84-4ecc-87ea-f4239f512fb1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.336486] env[68217]: DEBUG nova.network.neutron [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Refreshing network info cache for port 683c092b-4729-4946-9f3a-b14200be8d7c {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 771.362435] env[68217]: DEBUG nova.network.neutron [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Updating instance_info_cache with network_info: [{"id": "fd7f0c32-5631-4c5d-9e7a-12a133f76232", "address": "fa:16:3e:f1:18:93", "network": {"id": "b3eaa102-7723-4193-97a7-84c3b6d87de4", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1747363513-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b99296b92df248d684d9e224d27bdcbc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd7f0c32-56", "ovs_interfaceid": "fd7f0c32-5631-4c5d-9e7a-12a133f76232", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.686197] env[68217]: DEBUG nova.scheduler.client.report [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 771.698148] env[68217]: DEBUG oslo_vmware.api [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 
tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961082, 'name': PowerOnVM_Task, 'duration_secs': 0.764686} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.698525] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 771.698768] env[68217]: INFO nova.compute.manager [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Took 12.62 seconds to spawn the instance on the hypervisor. [ 771.698974] env[68217]: DEBUG nova.compute.manager [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 771.700029] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa368c0b-fc7e-4802-90be-d9e0ee9e98c0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.747829] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d65b42-19e5-9b69-fe77-b10c92e66b7e, 'name': SearchDatastore_Task, 'duration_secs': 0.010859} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.748126] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.748431] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 7584180b-efa6-4038-9f3a-619ab7937553/7584180b-efa6-4038-9f3a-619ab7937553.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 771.748867] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2d326f18-1d0d-461e-b1d9-866fecb21817 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.758461] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 771.758461] env[68217]: value = "task-2961086" [ 771.758461] env[68217]: _type = "Task" [ 771.758461] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.771285] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961086, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.805021] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961085, 'name': CreateVM_Task, 'duration_secs': 0.341257} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.805114] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 771.805875] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.806019] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.806334] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 771.806585] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72cf9f58-3dc9-48b0-992d-6e9fc2a8df08 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.811402] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 771.811402] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]526697be-7273-67ff-3d3a-a64c346133c3" [ 771.811402] env[68217]: _type = "Task" [ 771.811402] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.820067] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526697be-7273-67ff-3d3a-a64c346133c3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 771.864936] env[68217]: DEBUG oslo_concurrency.lockutils [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Releasing lock "refresh_cache-693d6a74-a671-4d02-8798-cd3975507428" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 771.865924] env[68217]: DEBUG nova.compute.manager [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Instance network_info: |[{"id": "fd7f0c32-5631-4c5d-9e7a-12a133f76232", "address": "fa:16:3e:f1:18:93", "network": {"id": "b3eaa102-7723-4193-97a7-84c3b6d87de4", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1747363513-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b99296b92df248d684d9e224d27bdcbc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd7f0c32-56", "ovs_interfaceid": "fd7f0c32-5631-4c5d-9e7a-12a133f76232", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 771.866062] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:18:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c330dbdb-ad20-4e7e-8a12-66e4a914a84a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fd7f0c32-5631-4c5d-9e7a-12a133f76232', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 771.873817] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 771.874063] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 771.874304] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-56214060-d864-489e-a81a-0b19138bb312 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.895044] env[68217]: DEBUG nova.compute.manager [req-fa642069-07aa-45f3-a8c2-0074586d737f req-0dee1565-6431-49d8-b9f4-78698b426f51 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Received event network-changed-c4ffafa7-b375-4f41-90e8-0db42f248139 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 771.898155] env[68217]: DEBUG nova.compute.manager [req-fa642069-07aa-45f3-a8c2-0074586d737f req-0dee1565-6431-49d8-b9f4-78698b426f51 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Refreshing instance network info cache due to event network-changed-c4ffafa7-b375-4f41-90e8-0db42f248139. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 771.898155] env[68217]: DEBUG oslo_concurrency.lockutils [req-fa642069-07aa-45f3-a8c2-0074586d737f req-0dee1565-6431-49d8-b9f4-78698b426f51 service nova] Acquiring lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 771.898155] env[68217]: DEBUG oslo_concurrency.lockutils [req-fa642069-07aa-45f3-a8c2-0074586d737f req-0dee1565-6431-49d8-b9f4-78698b426f51 service nova] Acquired lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 771.898155] env[68217]: DEBUG nova.network.neutron [req-fa642069-07aa-45f3-a8c2-0074586d737f req-0dee1565-6431-49d8-b9f4-78698b426f51 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Refreshing network info cache for port c4ffafa7-b375-4f41-90e8-0db42f248139 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 771.898591] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 771.898591] env[68217]: value = "task-2961087" [ 771.898591] env[68217]: _type = "Task" [ 771.898591] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 771.909281] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961087, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.193880] env[68217]: DEBUG oslo_concurrency.lockutils [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.989s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.197758] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 28.772s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.198127] env[68217]: DEBUG nova.objects.instance [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Lazy-loading 'resources' on Instance uuid e089c20e-b788-4e6c-9bd2-9ad485305582 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 772.221967] env[68217]: INFO nova.compute.manager [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Took 58.44 seconds to build instance. [ 772.228740] env[68217]: INFO nova.scheduler.client.report [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Deleted allocations for instance af11d05f-4432-4505-bb52-226414488960 [ 772.274615] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961086, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.323685] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526697be-7273-67ff-3d3a-a64c346133c3, 'name': SearchDatastore_Task, 'duration_secs': 0.010968} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.324055] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.324432] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 772.324709] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.324884] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 772.325071] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 772.325445] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2c73c377-8514-48ad-8721-e7b9ec34d457 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.355286] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 772.355423] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 772.357762] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-068a143e-3ab5-46f4-9f13-842f90c594ec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.363269] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 772.363269] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fc9d2e-f784-4ae1-4d15-29e5209e452d" [ 772.363269] env[68217]: _type = "Task" [ 772.363269] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.372819] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fc9d2e-f784-4ae1-4d15-29e5209e452d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.410540] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961087, 'name': CreateVM_Task, 'duration_secs': 0.378664} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.410740] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 772.411529] env[68217]: DEBUG oslo_concurrency.lockutils [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.411688] env[68217]: DEBUG oslo_concurrency.lockutils [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 772.412043] env[68217]: DEBUG oslo_concurrency.lockutils [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 772.413082] env[68217]: DEBUG nova.network.neutron [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Updated VIF entry in instance network info cache for port 683c092b-4729-4946-9f3a-b14200be8d7c. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 772.413552] env[68217]: DEBUG nova.network.neutron [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Updating instance_info_cache with network_info: [{"id": "683c092b-4729-4946-9f3a-b14200be8d7c", "address": "fa:16:3e:52:04:da", "network": {"id": "1df67255-b55c-4745-9486-c3a61fbee22c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1634624222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abe88ad43d2c4fd681e7d2aa42c7d362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap683c092b-47", "ovs_interfaceid": "683c092b-4729-4946-9f3a-b14200be8d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 772.414806] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f7f59f2-f88b-4d10-947a-f407d75b222e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.422979] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 772.422979] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ca28d1-fd70-09f7-b12f-a357fc8a9527" [ 772.422979] env[68217]: _type = "Task" [ 772.422979] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.433772] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ca28d1-fd70-09f7-b12f-a357fc8a9527, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.724380] env[68217]: DEBUG oslo_concurrency.lockutils [None req-02e4bf2c-8aa2-4b98-8f45-602d4f5569f1 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "156ea1ad-6e52-4848-915d-7ba74c606e6e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.444s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.742118] env[68217]: DEBUG oslo_concurrency.lockutils [None req-de69bb54-fad5-46d4-a299-48276b79f51a tempest-VolumesAssistedSnapshotsTest-1702519135 tempest-VolumesAssistedSnapshotsTest-1702519135-project-member] Lock "af11d05f-4432-4505-bb52-226414488960" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 33.577s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.773172] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961086, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.640352} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.773501] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 7584180b-efa6-4038-9f3a-619ab7937553/7584180b-efa6-4038-9f3a-619ab7937553.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 772.773793] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 772.774356] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-608cadfe-b0d5-49ba-a76b-e7fe7145f366 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.780324] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 772.780324] env[68217]: value = "task-2961089" [ 772.780324] env[68217]: _type = "Task" [ 772.780324] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.791615] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961089, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.874511] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fc9d2e-f784-4ae1-4d15-29e5209e452d, 'name': SearchDatastore_Task, 'duration_secs': 0.021703} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.878615] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e695d873-5397-4132-8f0f-7365e6d571c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.885443] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 772.885443] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52404648-854a-d909-d45f-b3384e2c539e" [ 772.885443] env[68217]: _type = "Task" [ 772.885443] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.893917] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52404648-854a-d909-d45f-b3384e2c539e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 772.919858] env[68217]: DEBUG oslo_concurrency.lockutils [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] Releasing lock "refresh_cache-dfeeed37-8c84-4ecc-87ea-f4239f512fb1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.920164] env[68217]: DEBUG nova.compute.manager [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Received event network-vif-plugged-86568bc3-8f1e-4880-9a22-48003fc7babd {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 772.920362] env[68217]: DEBUG oslo_concurrency.lockutils [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] Acquiring lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 772.920560] env[68217]: DEBUG oslo_concurrency.lockutils [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] Lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 772.920721] env[68217]: DEBUG oslo_concurrency.lockutils [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] Lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 772.920884] env[68217]: DEBUG nova.compute.manager [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] No waiting events found dispatching network-vif-plugged-86568bc3-8f1e-4880-9a22-48003fc7babd {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 772.921064] env[68217]: WARNING nova.compute.manager [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Received unexpected event network-vif-plugged-86568bc3-8f1e-4880-9a22-48003fc7babd for instance with vm_state building and task_state spawning. [ 772.921292] env[68217]: DEBUG nova.compute.manager [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Received event network-changed-86568bc3-8f1e-4880-9a22-48003fc7babd {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 772.921503] env[68217]: DEBUG nova.compute.manager [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Refreshing instance network info cache due to event network-changed-86568bc3-8f1e-4880-9a22-48003fc7babd. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 772.921647] env[68217]: DEBUG oslo_concurrency.lockutils [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] Acquiring lock "refresh_cache-95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.921778] env[68217]: DEBUG oslo_concurrency.lockutils [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] Acquired lock "refresh_cache-95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 772.921930] env[68217]: DEBUG nova.network.neutron [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Refreshing network info cache for port 86568bc3-8f1e-4880-9a22-48003fc7babd {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 772.933665] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ca28d1-fd70-09f7-b12f-a357fc8a9527, 'name': SearchDatastore_Task, 'duration_secs': 0.014226} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 772.937274] env[68217]: DEBUG oslo_concurrency.lockutils [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 772.937614] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 772.937903] env[68217]: DEBUG oslo_concurrency.lockutils [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.938101] env[68217]: DEBUG oslo_concurrency.lockutils [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 772.938318] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 772.939281] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b6b4805-4f82-4c67-8694-cbb609c1fb18 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.947610] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 772.947835] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 772.949467] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bcaa3107-f792-4a69-a4fd-e95fba7e8f2f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.956837] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 772.956837] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]520b24cb-e4be-bbdd-e365-24a6529f5e0a" [ 772.956837] env[68217]: _type = "Task" [ 772.956837] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 772.964529] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]520b24cb-e4be-bbdd-e365-24a6529f5e0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.092834] env[68217]: DEBUG nova.network.neutron [req-fa642069-07aa-45f3-a8c2-0074586d737f req-0dee1565-6431-49d8-b9f4-78698b426f51 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Updated VIF entry in instance network info cache for port c4ffafa7-b375-4f41-90e8-0db42f248139. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 773.093747] env[68217]: DEBUG nova.network.neutron [req-fa642069-07aa-45f3-a8c2-0074586d737f req-0dee1565-6431-49d8-b9f4-78698b426f51 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Updating instance_info_cache with network_info: [{"id": "c4ffafa7-b375-4f41-90e8-0db42f248139", "address": "fa:16:3e:50:21:dc", "network": {"id": "b560d1ba-8945-443a-9586-083067363663", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-25035929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0522eaa6ebc48a28651f6b3bf1434f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4ffafa7-b3", "ovs_interfaceid": "c4ffafa7-b375-4f41-90e8-0db42f248139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.138340] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b08c216c-bbfd-4ae5-8c05-a924e5a5f114 {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.147274] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00144a1b-1f30-4f98-bf92-865b3d791b6c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.178712] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a9c7f7-8c3e-48dc-b544-9217155cc830 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.186376] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa440481-ed2f-499c-9bf5-544ca8c3becc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.201138] env[68217]: DEBUG nova.compute.provider_tree [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 773.227172] env[68217]: DEBUG nova.compute.manager [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 773.290466] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961089, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.127201} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.290718] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 773.291785] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9363b502-8d40-4940-97b3-056b3d0760d9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.319160] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 7584180b-efa6-4038-9f3a-619ab7937553/7584180b-efa6-4038-9f3a-619ab7937553.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 773.319429] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d42536eb-8591-495e-be6e-00c1b266d22f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.340341] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 773.340341] env[68217]: value = "task-2961090" [ 773.340341] env[68217]: _type = "Task" [ 773.340341] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.348182] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961090, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.395853] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52404648-854a-d909-d45f-b3384e2c539e, 'name': SearchDatastore_Task, 'duration_secs': 0.031119} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.396122] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 773.396410] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f/95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 773.396659] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0631bf4f-ab85-41bb-844e-e458be92ce96 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.407440] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 773.407440] env[68217]: value = "task-2961091" [ 773.407440] env[68217]: _type = "Task" [ 773.407440] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.415456] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961091, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.468527] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]520b24cb-e4be-bbdd-e365-24a6529f5e0a, 'name': SearchDatastore_Task, 'duration_secs': 0.022468} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.469320] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c46efeda-4d7f-4647-8b76-940631959f66 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.476537] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 773.476537] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522da28a-df74-76f6-caa0-a50c87f05029" [ 773.476537] env[68217]: _type = "Task" [ 773.476537] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.484073] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522da28a-df74-76f6-caa0-a50c87f05029, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.596220] env[68217]: DEBUG oslo_concurrency.lockutils [req-fa642069-07aa-45f3-a8c2-0074586d737f req-0dee1565-6431-49d8-b9f4-78698b426f51 service nova] Releasing lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 773.705035] env[68217]: DEBUG nova.scheduler.client.report [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 773.762358] env[68217]: DEBUG nova.compute.manager [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Received event network-changed-683c092b-4729-4946-9f3a-b14200be8d7c {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 773.762923] env[68217]: DEBUG nova.compute.manager [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Refreshing instance network info cache due to event network-changed-683c092b-4729-4946-9f3a-b14200be8d7c. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 773.762923] env[68217]: DEBUG oslo_concurrency.lockutils [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] Acquiring lock "refresh_cache-dfeeed37-8c84-4ecc-87ea-f4239f512fb1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.763128] env[68217]: DEBUG oslo_concurrency.lockutils [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] Acquired lock "refresh_cache-dfeeed37-8c84-4ecc-87ea-f4239f512fb1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 773.763128] env[68217]: DEBUG nova.network.neutron [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Refreshing network info cache for port 683c092b-4729-4946-9f3a-b14200be8d7c {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 773.769677] env[68217]: DEBUG oslo_concurrency.lockutils [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 773.793954] env[68217]: DEBUG nova.network.neutron [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Updated VIF entry in instance network info cache for port 86568bc3-8f1e-4880-9a22-48003fc7babd. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 773.794364] env[68217]: DEBUG nova.network.neutron [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Updating instance_info_cache with network_info: [{"id": "86568bc3-8f1e-4880-9a22-48003fc7babd", "address": "fa:16:3e:92:6c:cf", "network": {"id": "0e4ebc2e-6e2d-4414-a560-9db08d15dabf", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1709597117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "904fd1b1eb9d4ab8bd1ea9967249bc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86568bc3-8f", "ovs_interfaceid": "86568bc3-8f1e-4880-9a22-48003fc7babd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.855722] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961090, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.920182] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961091, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508766} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.920182] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f/95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 773.920182] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 773.920182] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4adaf3fb-783f-4111-a6db-db0171b0fbef {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.926618] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 773.926618] env[68217]: value = "task-2961092" [ 773.926618] env[68217]: _type = "Task" [ 773.926618] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 773.939459] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961092, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 773.988171] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522da28a-df74-76f6-caa0-a50c87f05029, 'name': SearchDatastore_Task, 'duration_secs': 0.009545} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 773.989226] env[68217]: DEBUG oslo_concurrency.lockutils [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 773.989226] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 693d6a74-a671-4d02-8798-cd3975507428/693d6a74-a671-4d02-8798-cd3975507428.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 773.989226] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1389d9d1-7891-4875-aa2e-72880311ebac {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.997149] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 773.997149] env[68217]: value = "task-2961093" [ 773.997149] env[68217]: _type = "Task" [ 773.997149] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.007387] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961093, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.213050] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.015s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.218179] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 29.161s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.218506] env[68217]: DEBUG nova.objects.instance [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lazy-loading 'resources' on Instance uuid 38787c7d-a9cf-4ce6-a112-c1ec259697ca {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 774.238085] env[68217]: INFO nova.scheduler.client.report [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Deleted allocations for instance e089c20e-b788-4e6c-9bd2-9ad485305582 [ 774.303113] env[68217]: DEBUG oslo_concurrency.lockutils [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] Releasing lock "refresh_cache-95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 774.303405] env[68217]: DEBUG nova.compute.manager [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Received event network-vif-plugged-fd7f0c32-5631-4c5d-9e7a-12a133f76232 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 774.303611] env[68217]: DEBUG oslo_concurrency.lockutils [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] Acquiring lock "693d6a74-a671-4d02-8798-cd3975507428-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.303818] env[68217]: DEBUG oslo_concurrency.lockutils [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] Lock "693d6a74-a671-4d02-8798-cd3975507428-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.303983] env[68217]: DEBUG oslo_concurrency.lockutils [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] Lock "693d6a74-a671-4d02-8798-cd3975507428-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.304177] env[68217]: DEBUG 
nova.compute.manager [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] No waiting events found dispatching network-vif-plugged-fd7f0c32-5631-4c5d-9e7a-12a133f76232 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 774.307357] env[68217]: WARNING nova.compute.manager [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Received unexpected event network-vif-plugged-fd7f0c32-5631-4c5d-9e7a-12a133f76232 for instance with vm_state building and task_state spawning. [ 774.307357] env[68217]: DEBUG nova.compute.manager [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Received event network-changed-fd7f0c32-5631-4c5d-9e7a-12a133f76232 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 774.307357] env[68217]: DEBUG nova.compute.manager [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Refreshing instance network info cache due to event network-changed-fd7f0c32-5631-4c5d-9e7a-12a133f76232. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 774.307357] env[68217]: DEBUG oslo_concurrency.lockutils [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] Acquiring lock "refresh_cache-693d6a74-a671-4d02-8798-cd3975507428" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.307357] env[68217]: DEBUG oslo_concurrency.lockutils [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] Acquired lock "refresh_cache-693d6a74-a671-4d02-8798-cd3975507428" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 774.307990] env[68217]: DEBUG nova.network.neutron [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Refreshing network info cache for port fd7f0c32-5631-4c5d-9e7a-12a133f76232 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 774.335064] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquiring lock "7ec30097-1151-4b0d-8226-e4d34ea7b3c9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.335444] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "7ec30097-1151-4b0d-8226-e4d34ea7b3c9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.336449] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 
tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquiring lock "7ec30097-1151-4b0d-8226-e4d34ea7b3c9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.336818] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "7ec30097-1151-4b0d-8226-e4d34ea7b3c9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.337427] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "7ec30097-1151-4b0d-8226-e4d34ea7b3c9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.340796] env[68217]: INFO nova.compute.manager [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Terminating instance [ 774.358951] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961090, 'name': ReconfigVM_Task, 'duration_secs': 0.606835} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.362703] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 7584180b-efa6-4038-9f3a-619ab7937553/7584180b-efa6-4038-9f3a-619ab7937553.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 774.363210] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Creating Virtual Disk of size 1048576 KB and adapter type paraVirtual on the data store {{(pid=68217) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1383}} [ 774.364475] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CreateVirtualDisk_Task with opID=oslo.vmware-0e9f9318-2ae1-4099-989b-4dff67ad3f40 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.374700] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 774.374700] env[68217]: value = "task-2961095" [ 774.374700] env[68217]: _type = "Task" [ 774.374700] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.387874] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961095, 'name': CreateVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.441445] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961092, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068799} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.441445] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 774.441950] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-665fb995-f82d-4ee6-a4f4-730493347934 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.465622] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Reconfiguring VM instance instance-00000030 to attach disk [datastore1] 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f/95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 774.466144] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58f3e904-0453-42d5-81ab-7b4a8c53e2d2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.483219] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "156ea1ad-6e52-4848-915d-7ba74c606e6e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.483453] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "156ea1ad-6e52-4848-915d-7ba74c606e6e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.483643] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "156ea1ad-6e52-4848-915d-7ba74c606e6e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 774.483817] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "156ea1ad-6e52-4848-915d-7ba74c606e6e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 774.483980] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 
tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "156ea1ad-6e52-4848-915d-7ba74c606e6e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.486270] env[68217]: INFO nova.compute.manager [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Terminating instance [ 774.490413] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 774.490413] env[68217]: value = "task-2961096" [ 774.490413] env[68217]: _type = "Task" [ 774.490413] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.498596] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961096, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.506811] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961093, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482994} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.507945] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 693d6a74-a671-4d02-8798-cd3975507428/693d6a74-a671-4d02-8798-cd3975507428.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 774.508208] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 774.508468] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-481f036f-dcb8-4b09-9b28-9e12d8c86d31 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.516291] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 774.516291] env[68217]: value = "task-2961097" [ 774.516291] env[68217]: _type = "Task" [ 774.516291] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.525483] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961097, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.752773] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4186a033-3fad-42f1-bcd5-919f17e668b0 tempest-AttachInterfacesUnderV243Test-1081799999 tempest-AttachInterfacesUnderV243Test-1081799999-project-member] Lock "e089c20e-b788-4e6c-9bd2-9ad485305582" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 34.974s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 774.817469] env[68217]: DEBUG nova.network.neutron [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Updated VIF entry in instance network info cache for port 683c092b-4729-4946-9f3a-b14200be8d7c. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 774.817858] env[68217]: DEBUG nova.network.neutron [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Updating instance_info_cache with network_info: [{"id": "683c092b-4729-4946-9f3a-b14200be8d7c", "address": "fa:16:3e:52:04:da", "network": {"id": "1df67255-b55c-4745-9486-c3a61fbee22c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1634624222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abe88ad43d2c4fd681e7d2aa42c7d362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap683c092b-47", "ovs_interfaceid": "683c092b-4729-4946-9f3a-b14200be8d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.851374] env[68217]: DEBUG nova.compute.manager [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 774.851640] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 774.852526] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a436b54e-02c0-4526-b7d5-c54b5298ae23 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.862943] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 774.866048] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c457e42-486f-4628-8e4d-d2901f884f7d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.875292] env[68217]: DEBUG oslo_vmware.api [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 774.875292] env[68217]: value = "task-2961098" [ 774.875292] env[68217]: _type = "Task" [ 774.875292] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.889124] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961095, 'name': CreateVirtualDisk_Task, 'duration_secs': 0.068956} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.896123] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Created Virtual Disk of size 1048576 KB and type thin {{(pid=68217) create_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1404}} [ 774.896550] env[68217]: DEBUG oslo_vmware.api [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961098, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.897671] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5b8a966-5ed2-4926-ba1a-80648436e6ed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.934937] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] 7584180b-efa6-4038-9f3a-619ab7937553/ephemeral_0.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 774.941710] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-84c04cfa-ce9d-4167-a1b3-b949fa21d8ed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.961114] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 774.961114] env[68217]: value = "task-2961099" [ 774.961114] env[68217]: _type = "Task" [ 774.961114] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.969714] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961099, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.989851] env[68217]: DEBUG nova.compute.manager [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 774.989957] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 774.991225] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ee326c-5248-4d13-8b9e-43b972c9fce8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.006367] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961096, 'name': ReconfigVM_Task, 'duration_secs': 0.290725} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.006615] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 775.007384] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Reconfigured VM instance instance-00000030 to attach disk [datastore1] 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f/95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 775.007489] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d56fe153-d89b-4e25-81bc-4390fd4450ff {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.008928] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6a0b8ec8-ac71-4ee8-9191-a7dfea951d28 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.016162] env[68217]: DEBUG oslo_vmware.api [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 775.016162] env[68217]: value = "task-2961100" [ 775.016162] env[68217]: _type = "Task" [ 775.016162] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.017586] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 775.017586] env[68217]: value = "task-2961101" [ 775.017586] env[68217]: _type = "Task" [ 775.017586] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.036936] env[68217]: DEBUG oslo_vmware.api [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961100, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.043828] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961101, 'name': Rename_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.044154] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961097, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061012} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.047519] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 775.047970] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b179e7ee-f96c-4dd0-b575-2a4d893bd4d0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.074555] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Reconfiguring VM instance instance-00000031 to attach disk [datastore2] 693d6a74-a671-4d02-8798-cd3975507428/693d6a74-a671-4d02-8798-cd3975507428.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 775.074912] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bec4bfc5-a80d-4d65-80ba-7d33d376b6e6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.108575] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 775.108575] env[68217]: value = "task-2961102" [ 775.108575] env[68217]: _type = "Task" [ 775.108575] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.128125] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961102, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.271425] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba2bc16a-0f6d-48f7-8504-2926f8128588 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.285028] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58c45168-c7e5-4d8e-b904-2dc7e66b83f3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.318333] env[68217]: DEBUG nova.network.neutron [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Updated VIF entry in instance network info cache for port fd7f0c32-5631-4c5d-9e7a-12a133f76232. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 775.318609] env[68217]: DEBUG nova.network.neutron [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Updating instance_info_cache with network_info: [{"id": "fd7f0c32-5631-4c5d-9e7a-12a133f76232", "address": "fa:16:3e:f1:18:93", "network": {"id": "b3eaa102-7723-4193-97a7-84c3b6d87de4", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1747363513-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b99296b92df248d684d9e224d27bdcbc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd7f0c32-56", "ovs_interfaceid": "fd7f0c32-5631-4c5d-9e7a-12a133f76232", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.320623] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-078dd8eb-72b1-46dc-a399-52482d212d26 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.323711] env[68217]: DEBUG oslo_concurrency.lockutils [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] Releasing lock "refresh_cache-dfeeed37-8c84-4ecc-87ea-f4239f512fb1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.324037] env[68217]: DEBUG nova.compute.manager [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Received event network-changed-e5088774-710c-4aa5-bfe4-b2e2fdf7edc9 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 775.324124] env[68217]: DEBUG nova.compute.manager [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Refreshing instance network info cache due to event network-changed-e5088774-710c-4aa5-bfe4-b2e2fdf7edc9. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 775.324319] env[68217]: DEBUG oslo_concurrency.lockutils [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] Acquiring lock "refresh_cache-7ec30097-1151-4b0d-8226-e4d34ea7b3c9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.324459] env[68217]: DEBUG oslo_concurrency.lockutils [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] Acquired lock "refresh_cache-7ec30097-1151-4b0d-8226-e4d34ea7b3c9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 775.324617] env[68217]: DEBUG nova.network.neutron [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Refreshing network info cache for port e5088774-710c-4aa5-bfe4-b2e2fdf7edc9 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 775.331481] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ea35b0-5170-46f0-bd5e-c1766163c517 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.348041] env[68217]: DEBUG nova.compute.provider_tree [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 775.385161] env[68217]: DEBUG oslo_vmware.api [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961098, 'name': PowerOffVM_Task, 'duration_secs': 0.20447} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.385769] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 775.385769] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 775.385992] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1a18f64-709c-4628-93c2-a79d04446b45 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.458362] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 775.458488] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 775.458677] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Deleting the datastore file [datastore1] 7ec30097-1151-4b0d-8226-e4d34ea7b3c9 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 775.458972] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-451babfa-2712-41fa-9653-9dbf53cd8984 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.469853] env[68217]: DEBUG oslo_vmware.api [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 775.469853] env[68217]: value = "task-2961104" [ 775.469853] env[68217]: _type = "Task" [ 775.469853] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.477741] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961099, 'name': ReconfigVM_Task, 'duration_secs': 0.432795} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.477741] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Reconfigured VM instance instance-0000002f to attach disk [datastore1] 7584180b-efa6-4038-9f3a-619ab7937553/ephemeral_0.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 775.477969] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-361af6c7-92b6-4d1d-bd17-1c51f1fa2560 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.483278] env[68217]: DEBUG oslo_vmware.api [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961104, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.487777] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 775.487777] env[68217]: value = "task-2961105" [ 775.487777] env[68217]: _type = "Task" [ 775.487777] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.497125] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961105, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.529257] env[68217]: DEBUG oslo_vmware.api [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961100, 'name': PowerOffVM_Task, 'duration_secs': 0.195152} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.529909] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 775.530254] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 775.531707] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1d9ec461-e6f0-4c87-884d-abeeb2ffdd92 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.538178] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961101, 'name': Rename_Task, 'duration_secs': 0.156605} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.538477] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 775.538934] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-62342b61-9933-4f0e-b9cf-0dc4b98b7c5b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.545214] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 775.545214] env[68217]: value = "task-2961107" [ 775.545214] env[68217]: _type = "Task" [ 775.545214] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.553898] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961107, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.600127] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 775.600127] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 775.600127] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Deleting the datastore file [datastore1] 156ea1ad-6e52-4848-915d-7ba74c606e6e {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 775.600127] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19ffdc0b-f38b-4340-8be5-1ef809da9295 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.605749] env[68217]: DEBUG oslo_vmware.api [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 775.605749] env[68217]: value = "task-2961108" [ 775.605749] env[68217]: _type = "Task" [ 775.605749] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.617440] env[68217]: DEBUG oslo_vmware.api [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961108, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.620171] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961102, 'name': ReconfigVM_Task, 'duration_secs': 0.312743} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.620439] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Reconfigured VM instance instance-00000031 to attach disk [datastore2] 693d6a74-a671-4d02-8798-cd3975507428/693d6a74-a671-4d02-8798-cd3975507428.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 775.621063] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a17e73af-b31d-4402-b9de-7f07f70c4f39 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.627101] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 775.627101] env[68217]: value = "task-2961109" [ 775.627101] env[68217]: _type = "Task" [ 775.627101] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 775.639844] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961109, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 775.824623] env[68217]: DEBUG oslo_concurrency.lockutils [req-f9785ef8-a3e0-41ba-978b-6d289d10d288 req-8ac616ff-4850-4ffb-9a0c-24ea3177378b service nova] Releasing lock "refresh_cache-693d6a74-a671-4d02-8798-cd3975507428" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 775.852496] env[68217]: DEBUG nova.scheduler.client.report [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 775.985969] env[68217]: DEBUG oslo_vmware.api [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961104, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.145564} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.985969] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 775.985969] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 775.985969] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 775.985969] env[68217]: INFO nova.compute.manager [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Took 1.13 seconds to destroy the instance on the hypervisor. [ 775.986607] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 775.986607] env[68217]: DEBUG nova.compute.manager [-] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 775.986607] env[68217]: DEBUG nova.network.neutron [-] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 776.004460] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961105, 'name': Rename_Task, 'duration_secs': 0.267587} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.004845] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 776.005191] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f45085d-395b-48d6-a6a4-eb20f65d04c8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.010124] env[68217]: DEBUG nova.compute.manager [req-a932afa7-84a9-4d8a-842c-12f4f94f7a6d req-24814670-4cf9-4a28-a677-f560e8fc5c73 service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Received event network-changed-683c092b-4729-4946-9f3a-b14200be8d7c {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 776.010393] env[68217]: DEBUG nova.compute.manager [req-a932afa7-84a9-4d8a-842c-12f4f94f7a6d req-24814670-4cf9-4a28-a677-f560e8fc5c73 service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Refreshing instance network info cache due to event network-changed-683c092b-4729-4946-9f3a-b14200be8d7c. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 776.010691] env[68217]: DEBUG oslo_concurrency.lockutils [req-a932afa7-84a9-4d8a-842c-12f4f94f7a6d req-24814670-4cf9-4a28-a677-f560e8fc5c73 service nova] Acquiring lock "refresh_cache-dfeeed37-8c84-4ecc-87ea-f4239f512fb1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.010967] env[68217]: DEBUG oslo_concurrency.lockutils [req-a932afa7-84a9-4d8a-842c-12f4f94f7a6d req-24814670-4cf9-4a28-a677-f560e8fc5c73 service nova] Acquired lock "refresh_cache-dfeeed37-8c84-4ecc-87ea-f4239f512fb1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.011130] env[68217]: DEBUG nova.network.neutron [req-a932afa7-84a9-4d8a-842c-12f4f94f7a6d req-24814670-4cf9-4a28-a677-f560e8fc5c73 service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Refreshing network info cache for port 683c092b-4729-4946-9f3a-b14200be8d7c {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 776.018116] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 776.018116] env[68217]: value = "task-2961110" [ 776.018116] env[68217]: _type = "Task" [ 776.018116] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.032581] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961110, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.063946] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961107, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.119417] env[68217]: DEBUG oslo_vmware.api [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961108, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.267601} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.119763] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 776.120513] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 776.120973] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 776.121240] env[68217]: INFO nova.compute.manager [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Took 1.13 seconds to destroy the instance on the hypervisor. [ 776.121531] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 776.121746] env[68217]: DEBUG nova.compute.manager [-] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 776.121831] env[68217]: DEBUG nova.network.neutron [-] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 776.136976] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961109, 'name': Rename_Task, 'duration_secs': 0.192139} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.137471] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 776.137729] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fac7686d-9160-40ac-837e-3f16328be061 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.144949] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 776.144949] env[68217]: value = "task-2961111" [ 776.144949] env[68217]: _type = "Task" [ 776.144949] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 776.157775] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961111, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.360161] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.142s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.362895] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.691s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 776.362895] env[68217]: DEBUG nova.objects.instance [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lazy-loading 'resources' on Instance uuid 3d03e0b7-0469-4041-a7d5-7768326eb3b5 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 776.383799] env[68217]: INFO nova.scheduler.client.report [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleted allocations for instance 38787c7d-a9cf-4ce6-a112-c1ec259697ca [ 776.459688] env[68217]: DEBUG nova.network.neutron [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Updated VIF entry in instance network info cache for port e5088774-710c-4aa5-bfe4-b2e2fdf7edc9. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 776.459688] env[68217]: DEBUG nova.network.neutron [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Updating instance_info_cache with network_info: [{"id": "e5088774-710c-4aa5-bfe4-b2e2fdf7edc9", "address": "fa:16:3e:60:a1:d8", "network": {"id": "1df67255-b55c-4745-9486-c3a61fbee22c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1634624222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abe88ad43d2c4fd681e7d2aa42c7d362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5088774-71", "ovs_interfaceid": "e5088774-710c-4aa5-bfe4-b2e2fdf7edc9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.528223] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961110, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.556412] env[68217]: DEBUG oslo_vmware.api [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961107, 'name': PowerOnVM_Task, 'duration_secs': 0.549101} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 776.556702] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 776.556945] env[68217]: INFO nova.compute.manager [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Took 11.32 seconds to spawn the instance on the hypervisor. 
[ 776.557168] env[68217]: DEBUG nova.compute.manager [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 776.557966] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2c0e5e3-04c7-45e8-91c9-6f729ea6b077 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 776.657708] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961111, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 776.897652] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d73b0fb6-9851-44e3-af84-2df792169981 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "38787c7d-a9cf-4ce6-a112-c1ec259697ca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 35.273s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 776.961215] env[68217]: DEBUG oslo_concurrency.lockutils [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] Releasing lock "refresh_cache-7ec30097-1151-4b0d-8226-e4d34ea7b3c9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 776.961817] env[68217]: DEBUG nova.compute.manager [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Received event network-changed-e5088774-710c-4aa5-bfe4-b2e2fdf7edc9 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 776.962136] env[68217]: DEBUG nova.compute.manager [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Refreshing instance network info cache due to event network-changed-e5088774-710c-4aa5-bfe4-b2e2fdf7edc9. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 776.962591] env[68217]: DEBUG oslo_concurrency.lockutils [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] Acquiring lock "refresh_cache-7ec30097-1151-4b0d-8226-e4d34ea7b3c9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 776.962591] env[68217]: DEBUG oslo_concurrency.lockutils [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] Acquired lock "refresh_cache-7ec30097-1151-4b0d-8226-e4d34ea7b3c9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 776.962696] env[68217]: DEBUG nova.network.neutron [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Refreshing network info cache for port e5088774-710c-4aa5-bfe4-b2e2fdf7edc9 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 777.031826] env[68217]: DEBUG oslo_vmware.api [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961110, 'name': PowerOnVM_Task, 'duration_secs': 0.691247} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.032059] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 777.032325] env[68217]: INFO nova.compute.manager [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Took 14.02 seconds to spawn the instance on the hypervisor. [ 777.032482] env[68217]: DEBUG nova.compute.manager [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 777.035938] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff95a12a-6a63-4f88-a89e-6421bd4e8b19 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.076429] env[68217]: INFO nova.compute.manager [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Took 46.83 seconds to build instance. [ 777.157544] env[68217]: DEBUG oslo_vmware.api [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961111, 'name': PowerOnVM_Task, 'duration_secs': 0.646811} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 777.157544] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 777.157544] env[68217]: INFO nova.compute.manager [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Took 9.31 seconds to spawn the instance on the hypervisor. [ 777.157544] env[68217]: DEBUG nova.compute.manager [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 777.160544] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7c2a125-351c-4701-a63c-0c80cfe59438 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.332144] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25db9a58-eb49-45f4-b688-1b0f57a31984 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.338437] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c787592a-855d-4c4c-8b59-a7641334e3d8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.372094] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c91058b2-dc61-43a9-9450-a09fe78d3470 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.379776] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a25a7a2e-73c9-4574-8982-fd046be5c581 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.393549] env[68217]: DEBUG nova.compute.provider_tree [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.401269] env[68217]: DEBUG nova.network.neutron [req-a932afa7-84a9-4d8a-842c-12f4f94f7a6d req-24814670-4cf9-4a28-a677-f560e8fc5c73 service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Updated VIF entry in instance network info cache for port 683c092b-4729-4946-9f3a-b14200be8d7c. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 777.401590] env[68217]: DEBUG nova.network.neutron [req-a932afa7-84a9-4d8a-842c-12f4f94f7a6d req-24814670-4cf9-4a28-a677-f560e8fc5c73 service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Updating instance_info_cache with network_info: [{"id": "683c092b-4729-4946-9f3a-b14200be8d7c", "address": "fa:16:3e:52:04:da", "network": {"id": "1df67255-b55c-4745-9486-c3a61fbee22c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1634624222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.246", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abe88ad43d2c4fd681e7d2aa42c7d362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap683c092b-47", "ovs_interfaceid": "683c092b-4729-4946-9f3a-b14200be8d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.490336] env[68217]: INFO nova.network.neutron [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Port e5088774-710c-4aa5-bfe4-b2e2fdf7edc9 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 777.490566] env[68217]: DEBUG nova.network.neutron [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.552471] env[68217]: INFO nova.compute.manager [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Took 57.88 seconds to build instance. 
[ 777.579250] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7817733b-4c1c-4e7f-9a77-de349c2a5e39 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.157s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 777.592254] env[68217]: DEBUG nova.network.neutron [-] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.679460] env[68217]: INFO nova.compute.manager [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Took 40.40 seconds to build instance. [ 777.712185] env[68217]: DEBUG nova.network.neutron [-] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.896638] env[68217]: DEBUG nova.scheduler.client.report [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 777.906158] env[68217]: DEBUG oslo_concurrency.lockutils [req-a932afa7-84a9-4d8a-842c-12f4f94f7a6d req-24814670-4cf9-4a28-a677-f560e8fc5c73 service nova] Releasing lock "refresh_cache-dfeeed37-8c84-4ecc-87ea-f4239f512fb1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 777.994410] env[68217]: DEBUG oslo_concurrency.lockutils [req-e72f043a-3aa8-4fca-8eff-2150fca5aa3f req-84cf644c-09e8-4e8f-b113-02a186ed839d service nova] Releasing lock "refresh_cache-7ec30097-1151-4b0d-8226-e4d34ea7b3c9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 778.058902] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a0377a6-c85f-47f3-bad8-9581c45f6d12 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "7584180b-efa6-4038-9f3a-619ab7937553" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 73.928s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.081681] env[68217]: DEBUG nova.compute.manager [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 778.095942] env[68217]: INFO nova.compute.manager [-] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Took 2.11 seconds to deallocate network for instance. [ 778.183134] env[68217]: DEBUG oslo_concurrency.lockutils [None req-69465dc0-dd7c-42b5-9bdb-311f501b8fe8 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "693d6a74-a671-4d02-8798-cd3975507428" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.493s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.216190] env[68217]: INFO nova.compute.manager [-] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Took 2.09 seconds to deallocate network for instance. [ 778.278817] env[68217]: DEBUG nova.compute.manager [req-4ad44461-5036-497d-8ff0-06e1e1aee5f2 req-ee26498a-0b95-49ac-89ef-6260c0d0a2fc service nova] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Received event network-vif-deleted-e5088774-710c-4aa5-bfe4-b2e2fdf7edc9 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 778.278817] env[68217]: DEBUG nova.compute.manager [req-4ad44461-5036-497d-8ff0-06e1e1aee5f2 req-ee26498a-0b95-49ac-89ef-6260c0d0a2fc service nova] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Received event network-vif-deleted-d74ea5d5-78b7-4327-aefe-3d63fd497956 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 778.278996] env[68217]: DEBUG nova.compute.manager [req-4ad44461-5036-497d-8ff0-06e1e1aee5f2 req-ee26498a-0b95-49ac-89ef-6260c0d0a2fc service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Received event network-changed-683c092b-4729-4946-9f3a-b14200be8d7c {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 778.279375] env[68217]: DEBUG nova.compute.manager [req-4ad44461-5036-497d-8ff0-06e1e1aee5f2 req-ee26498a-0b95-49ac-89ef-6260c0d0a2fc service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Refreshing instance network info cache due to event network-changed-683c092b-4729-4946-9f3a-b14200be8d7c. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 778.279785] env[68217]: DEBUG oslo_concurrency.lockutils [req-4ad44461-5036-497d-8ff0-06e1e1aee5f2 req-ee26498a-0b95-49ac-89ef-6260c0d0a2fc service nova] Acquiring lock "refresh_cache-dfeeed37-8c84-4ecc-87ea-f4239f512fb1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 778.279951] env[68217]: DEBUG oslo_concurrency.lockutils [req-4ad44461-5036-497d-8ff0-06e1e1aee5f2 req-ee26498a-0b95-49ac-89ef-6260c0d0a2fc service nova] Acquired lock "refresh_cache-dfeeed37-8c84-4ecc-87ea-f4239f512fb1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 778.280130] env[68217]: DEBUG nova.network.neutron [req-4ad44461-5036-497d-8ff0-06e1e1aee5f2 req-ee26498a-0b95-49ac-89ef-6260c0d0a2fc service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Refreshing network info cache for port 683c092b-4729-4946-9f3a-b14200be8d7c {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 778.402543] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.040s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 778.406367] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.385s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 778.408280] env[68217]: INFO nova.compute.claims [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 778.435879] env[68217]: INFO nova.scheduler.client.report [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Deleted allocations for instance 3d03e0b7-0469-4041-a7d5-7768326eb3b5 [ 778.564614] env[68217]: DEBUG nova.compute.manager [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 778.605945] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.616072] env[68217]: DEBUG oslo_concurrency.lockutils [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.688105] env[68217]: DEBUG nova.compute.manager [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 778.724078] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 778.954020] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5a7f9ab6-0143-48be-957d-56ddfada334e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "3d03e0b7-0469-4041-a7d5-7768326eb3b5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 31.657s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 779.098143] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.213280] env[68217]: DEBUG oslo_concurrency.lockutils [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.292579] env[68217]: DEBUG nova.network.neutron [req-4ad44461-5036-497d-8ff0-06e1e1aee5f2 req-ee26498a-0b95-49ac-89ef-6260c0d0a2fc service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Updated VIF entry in instance network info cache for port 683c092b-4729-4946-9f3a-b14200be8d7c. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 779.292579] env[68217]: DEBUG nova.network.neutron [req-4ad44461-5036-497d-8ff0-06e1e1aee5f2 req-ee26498a-0b95-49ac-89ef-6260c0d0a2fc service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Updating instance_info_cache with network_info: [{"id": "683c092b-4729-4946-9f3a-b14200be8d7c", "address": "fa:16:3e:52:04:da", "network": {"id": "1df67255-b55c-4745-9486-c3a61fbee22c", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-1634624222-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "abe88ad43d2c4fd681e7d2aa42c7d362", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "863474bc-a24a-4823-828c-580a187829e3", "external-id": "nsx-vlan-transportzone-617", "segmentation_id": 617, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap683c092b-47", "ovs_interfaceid": "683c092b-4729-4946-9f3a-b14200be8d7c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 779.623435] env[68217]: DEBUG oslo_concurrency.lockutils [None req-005f0a31-7f0b-4a34-b48c-5bec10d38a83 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquiring lock "693d6a74-a671-4d02-8798-cd3975507428" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 779.624228] env[68217]: DEBUG oslo_concurrency.lockutils [None req-005f0a31-7f0b-4a34-b48c-5bec10d38a83 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "693d6a74-a671-4d02-8798-cd3975507428" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 779.624228] env[68217]: INFO nova.compute.manager [None req-005f0a31-7f0b-4a34-b48c-5bec10d38a83 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Rebooting instance [ 779.631025] env[68217]: DEBUG nova.compute.manager [req-dd7c117b-1c02-4eaa-840b-fd989e8226e6 req-c23fbfa3-1631-4ee1-8596-57b640271dc1 service nova] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Received event network-changed-5417460d-31c2-4462-b8aa-192085fc884f {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 779.631025] env[68217]: DEBUG nova.compute.manager [req-dd7c117b-1c02-4eaa-840b-fd989e8226e6 req-c23fbfa3-1631-4ee1-8596-57b640271dc1 service nova] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Refreshing instance network info cache due to event network-changed-5417460d-31c2-4462-b8aa-192085fc884f. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 779.631025] env[68217]: DEBUG oslo_concurrency.lockutils [req-dd7c117b-1c02-4eaa-840b-fd989e8226e6 req-c23fbfa3-1631-4ee1-8596-57b640271dc1 service nova] Acquiring lock "refresh_cache-7584180b-efa6-4038-9f3a-619ab7937553" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 779.631025] env[68217]: DEBUG oslo_concurrency.lockutils [req-dd7c117b-1c02-4eaa-840b-fd989e8226e6 req-c23fbfa3-1631-4ee1-8596-57b640271dc1 service nova] Acquired lock "refresh_cache-7584180b-efa6-4038-9f3a-619ab7937553" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 779.631025] env[68217]: DEBUG nova.network.neutron [req-dd7c117b-1c02-4eaa-840b-fd989e8226e6 req-c23fbfa3-1631-4ee1-8596-57b640271dc1 service nova] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Refreshing network info cache for port 5417460d-31c2-4462-b8aa-192085fc884f {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 779.793936] env[68217]: DEBUG oslo_concurrency.lockutils [req-4ad44461-5036-497d-8ff0-06e1e1aee5f2 req-ee26498a-0b95-49ac-89ef-6260c0d0a2fc service nova] Releasing lock "refresh_cache-dfeeed37-8c84-4ecc-87ea-f4239f512fb1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 779.958022] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed3362b0-8d77-4318-af83-57958f5690d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.964901] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a335d42c-6494-4b46-ba36-bebd429ae063 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.998256] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8924526d-174b-42b8-9776-f18fe0f33c4f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.006801] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e55e3026-6dca-4667-a4ee-0bfcd954eccf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.021705] env[68217]: DEBUG nova.compute.provider_tree [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 780.155612] env[68217]: DEBUG oslo_concurrency.lockutils [None req-005f0a31-7f0b-4a34-b48c-5bec10d38a83 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquiring lock "refresh_cache-693d6a74-a671-4d02-8798-cd3975507428" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.155612] env[68217]: DEBUG oslo_concurrency.lockutils [None req-005f0a31-7f0b-4a34-b48c-5bec10d38a83 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquired lock 
"refresh_cache-693d6a74-a671-4d02-8798-cd3975507428" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 780.155612] env[68217]: DEBUG nova.network.neutron [None req-005f0a31-7f0b-4a34-b48c-5bec10d38a83 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 780.317611] env[68217]: DEBUG nova.compute.manager [req-0236cf63-340f-4963-937e-a0bcd8d383f1 req-bf44b73f-5360-4bf8-bac9-3127a16f60ac service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Received event network-changed-fd7f0c32-5631-4c5d-9e7a-12a133f76232 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 780.317611] env[68217]: DEBUG nova.compute.manager [req-0236cf63-340f-4963-937e-a0bcd8d383f1 req-bf44b73f-5360-4bf8-bac9-3127a16f60ac service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Refreshing instance network info cache due to event network-changed-fd7f0c32-5631-4c5d-9e7a-12a133f76232. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 780.317611] env[68217]: DEBUG oslo_concurrency.lockutils [req-0236cf63-340f-4963-937e-a0bcd8d383f1 req-bf44b73f-5360-4bf8-bac9-3127a16f60ac service nova] Acquiring lock "refresh_cache-693d6a74-a671-4d02-8798-cd3975507428" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.525427] env[68217]: DEBUG nova.scheduler.client.report [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 780.832384] env[68217]: DEBUG nova.network.neutron [req-dd7c117b-1c02-4eaa-840b-fd989e8226e6 req-c23fbfa3-1631-4ee1-8596-57b640271dc1 service nova] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Updated VIF entry in instance network info cache for port 5417460d-31c2-4462-b8aa-192085fc884f. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 780.834768] env[68217]: DEBUG nova.network.neutron [req-dd7c117b-1c02-4eaa-840b-fd989e8226e6 req-c23fbfa3-1631-4ee1-8596-57b640271dc1 service nova] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Updating instance_info_cache with network_info: [{"id": "5417460d-31c2-4462-b8aa-192085fc884f", "address": "fa:16:3e:55:a8:37", "network": {"id": "1bc7f0b6-9537-490a-8acf-d4b9bee78802", "bridge": "br-int", "label": "tempest-ServersWithSpecificFlavorTestJSON-1016751512-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.215", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "36c9130982204910a139a45ddad542c3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "42f08482-a1da-405d-9918-d733d9f5173c", "external-id": "nsx-vlan-transportzone-381", "segmentation_id": 381, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5417460d-31", "ovs_interfaceid": "5417460d-31c2-4462-b8aa-192085fc884f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.032258] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.626s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 781.032258] env[68217]: DEBUG nova.compute.manager [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 781.035381] env[68217]: DEBUG oslo_concurrency.lockutils [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.443s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.037668] env[68217]: INFO nova.compute.claims [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 781.170735] env[68217]: DEBUG nova.network.neutron [None req-005f0a31-7f0b-4a34-b48c-5bec10d38a83 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Updating instance_info_cache with network_info: [{"id": "fd7f0c32-5631-4c5d-9e7a-12a133f76232", "address": "fa:16:3e:f1:18:93", "network": {"id": "b3eaa102-7723-4193-97a7-84c3b6d87de4", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1747363513-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b99296b92df248d684d9e224d27bdcbc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd7f0c32-56", "ovs_interfaceid": "fd7f0c32-5631-4c5d-9e7a-12a133f76232", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.335638] env[68217]: DEBUG oslo_concurrency.lockutils [req-dd7c117b-1c02-4eaa-840b-fd989e8226e6 req-c23fbfa3-1631-4ee1-8596-57b640271dc1 service nova] Releasing lock "refresh_cache-7584180b-efa6-4038-9f3a-619ab7937553" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.544785] env[68217]: DEBUG nova.compute.utils [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 781.546207] env[68217]: DEBUG nova.compute.manager [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 781.546432] env[68217]: DEBUG nova.network.neutron [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 781.593207] env[68217]: DEBUG nova.policy [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6222896997d440cc845ef1fc7c3b6d61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b603f67ee5544169b93c24c6a1900acc', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 781.673517] env[68217]: DEBUG oslo_concurrency.lockutils [None req-005f0a31-7f0b-4a34-b48c-5bec10d38a83 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Releasing lock "refresh_cache-693d6a74-a671-4d02-8798-cd3975507428" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 781.674829] env[68217]: DEBUG oslo_concurrency.lockutils [req-0236cf63-340f-4963-937e-a0bcd8d383f1 req-bf44b73f-5360-4bf8-bac9-3127a16f60ac service nova] Acquired lock "refresh_cache-693d6a74-a671-4d02-8798-cd3975507428" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 781.675032] env[68217]: DEBUG nova.network.neutron [req-0236cf63-340f-4963-937e-a0bcd8d383f1 req-bf44b73f-5360-4bf8-bac9-3127a16f60ac service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Refreshing network info cache for port fd7f0c32-5631-4c5d-9e7a-12a133f76232 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 781.733111] env[68217]: DEBUG oslo_concurrency.lockutils [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "149bd497-4ee6-4ca2-9d18-b276e773aedf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 781.733355] env[68217]: DEBUG oslo_concurrency.lockutils [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "149bd497-4ee6-4ca2-9d18-b276e773aedf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 781.946960] env[68217]: DEBUG nova.network.neutron [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Successfully created port: 
b687a815-30c5-4ac1-aed3-a25a04a96474 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 782.051159] env[68217]: DEBUG nova.compute.manager [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 782.178710] env[68217]: DEBUG nova.compute.manager [None req-005f0a31-7f0b-4a34-b48c-5bec10d38a83 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 782.181529] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-305ea456-98cc-4432-9fc6-b5e7dc1a1071 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.496521] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f63e41-8aa6-4cb3-83b6-cc0f72043b9d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.509839] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac74be27-f4ea-4f40-b3e9-54b7c43a80c5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.567690] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eeeda88-7eaf-420c-894b-a2425f418dce {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.577725] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-818f9ad3-78d0-466f-8d41-3a6d6e5deab4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.597400] env[68217]: DEBUG nova.compute.provider_tree [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 782.601141] env[68217]: DEBUG nova.network.neutron [req-0236cf63-340f-4963-937e-a0bcd8d383f1 req-bf44b73f-5360-4bf8-bac9-3127a16f60ac service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Updated VIF entry in instance network info cache for port fd7f0c32-5631-4c5d-9e7a-12a133f76232. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 782.601196] env[68217]: DEBUG nova.network.neutron [req-0236cf63-340f-4963-937e-a0bcd8d383f1 req-bf44b73f-5360-4bf8-bac9-3127a16f60ac service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Updating instance_info_cache with network_info: [{"id": "fd7f0c32-5631-4c5d-9e7a-12a133f76232", "address": "fa:16:3e:f1:18:93", "network": {"id": "b3eaa102-7723-4193-97a7-84c3b6d87de4", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-1747363513-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b99296b92df248d684d9e224d27bdcbc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c330dbdb-ad20-4e7e-8a12-66e4a914a84a", "external-id": "nsx-vlan-transportzone-181", "segmentation_id": 181, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfd7f0c32-56", "ovs_interfaceid": "fd7f0c32-5631-4c5d-9e7a-12a133f76232", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.067058] env[68217]: DEBUG nova.compute.manager [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 783.092779] env[68217]: DEBUG nova.virt.hardware [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 783.093044] env[68217]: DEBUG nova.virt.hardware [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 783.093203] env[68217]: DEBUG nova.virt.hardware [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 783.093571] env[68217]: DEBUG nova.virt.hardware [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 783.093571] env[68217]: DEBUG nova.virt.hardware [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 783.093672] env[68217]: DEBUG nova.virt.hardware [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 783.093853] env[68217]: DEBUG nova.virt.hardware [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 783.094023] env[68217]: DEBUG nova.virt.hardware [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 783.094192] env[68217]: DEBUG nova.virt.hardware [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 783.094352] env[68217]: DEBUG nova.virt.hardware [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 783.094521] env[68217]: DEBUG nova.virt.hardware [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 783.095709] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a2cda8-4ef1-414c-8109-871fb7b143de {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.100729] env[68217]: DEBUG nova.scheduler.client.report [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 783.108539] env[68217]: DEBUG oslo_concurrency.lockutils [req-0236cf63-340f-4963-937e-a0bcd8d383f1 req-bf44b73f-5360-4bf8-bac9-3127a16f60ac service nova] Releasing lock "refresh_cache-693d6a74-a671-4d02-8798-cd3975507428" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 783.108960] env[68217]: DEBUG nova.compute.manager [req-0236cf63-340f-4963-937e-a0bcd8d383f1 req-bf44b73f-5360-4bf8-bac9-3127a16f60ac service nova] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Received event network-changed-86568bc3-8f1e-4880-9a22-48003fc7babd {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 783.109074] env[68217]: DEBUG nova.compute.manager [req-0236cf63-340f-4963-937e-a0bcd8d383f1 req-bf44b73f-5360-4bf8-bac9-3127a16f60ac service nova] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Refreshing instance network info cache due to event network-changed-86568bc3-8f1e-4880-9a22-48003fc7babd. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 783.109348] env[68217]: DEBUG oslo_concurrency.lockutils [req-0236cf63-340f-4963-937e-a0bcd8d383f1 req-bf44b73f-5360-4bf8-bac9-3127a16f60ac service nova] Acquiring lock "refresh_cache-95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.109554] env[68217]: DEBUG oslo_concurrency.lockutils [req-0236cf63-340f-4963-937e-a0bcd8d383f1 req-bf44b73f-5360-4bf8-bac9-3127a16f60ac service nova] Acquired lock "refresh_cache-95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 783.109780] env[68217]: DEBUG nova.network.neutron [req-0236cf63-340f-4963-937e-a0bcd8d383f1 req-bf44b73f-5360-4bf8-bac9-3127a16f60ac service nova] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Refreshing network info cache for port 86568bc3-8f1e-4880-9a22-48003fc7babd {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 783.112941] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-813b061a-8f02-4d47-b7ed-93ff154470f6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.206277] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dddd5da3-43cb-415c-92e2-617a40fd3435 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.214979] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-005f0a31-7f0b-4a34-b48c-5bec10d38a83 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Doing hard reboot of VM {{(pid=68217) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 783.215282] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-6af83500-2ebc-4794-bafb-53dc408e7500 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.222852] env[68217]: DEBUG oslo_vmware.api [None req-005f0a31-7f0b-4a34-b48c-5bec10d38a83 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 783.222852] env[68217]: value = "task-2961116" [ 783.222852] env[68217]: _type = "Task" [ 783.222852] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 783.231699] env[68217]: DEBUG oslo_vmware.api [None req-005f0a31-7f0b-4a34-b48c-5bec10d38a83 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961116, 'name': ResetVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 783.413173] env[68217]: DEBUG nova.network.neutron [req-0236cf63-340f-4963-937e-a0bcd8d383f1 req-bf44b73f-5360-4bf8-bac9-3127a16f60ac service nova] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Updated VIF entry in instance network info cache for port 86568bc3-8f1e-4880-9a22-48003fc7babd. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 783.413173] env[68217]: DEBUG nova.network.neutron [req-0236cf63-340f-4963-937e-a0bcd8d383f1 req-bf44b73f-5360-4bf8-bac9-3127a16f60ac service nova] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Updating instance_info_cache with network_info: [{"id": "86568bc3-8f1e-4880-9a22-48003fc7babd", "address": "fa:16:3e:92:6c:cf", "network": {"id": "0e4ebc2e-6e2d-4414-a560-9db08d15dabf", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1709597117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "904fd1b1eb9d4ab8bd1ea9967249bc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86568bc3-8f", "ovs_interfaceid": "86568bc3-8f1e-4880-9a22-48003fc7babd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.612862] env[68217]: DEBUG oslo_concurrency.lockutils [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.577s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.613412] env[68217]: DEBUG nova.compute.manager [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 783.617797] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.835s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.619103] env[68217]: INFO nova.compute.claims [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 783.703343] env[68217]: DEBUG nova.compute.manager [req-4581ef36-fe4a-4d68-ace2-e89d0ac90e73 req-9de45751-8ccc-45e9-8d84-f5bdcf244ba3 service nova] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Received event network-vif-plugged-b687a815-30c5-4ac1-aed3-a25a04a96474 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 783.703343] env[68217]: DEBUG oslo_concurrency.lockutils [req-4581ef36-fe4a-4d68-ace2-e89d0ac90e73 req-9de45751-8ccc-45e9-8d84-f5bdcf244ba3 service nova] Acquiring lock "e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 783.703343] env[68217]: DEBUG oslo_concurrency.lockutils [req-4581ef36-fe4a-4d68-ace2-e89d0ac90e73 req-9de45751-8ccc-45e9-8d84-f5bdcf244ba3 service nova] Lock "e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 783.703343] env[68217]: DEBUG oslo_concurrency.lockutils [req-4581ef36-fe4a-4d68-ace2-e89d0ac90e73 req-9de45751-8ccc-45e9-8d84-f5bdcf244ba3 service nova] Lock "e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 783.703343] env[68217]: DEBUG nova.compute.manager [req-4581ef36-fe4a-4d68-ace2-e89d0ac90e73 req-9de45751-8ccc-45e9-8d84-f5bdcf244ba3 service nova] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] No waiting events found dispatching network-vif-plugged-b687a815-30c5-4ac1-aed3-a25a04a96474 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 783.703691] env[68217]: WARNING nova.compute.manager [req-4581ef36-fe4a-4d68-ace2-e89d0ac90e73 req-9de45751-8ccc-45e9-8d84-f5bdcf244ba3 service nova] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Received unexpected event network-vif-plugged-b687a815-30c5-4ac1-aed3-a25a04a96474 for instance with vm_state building and task_state spawning. [ 783.735361] env[68217]: DEBUG oslo_vmware.api [None req-005f0a31-7f0b-4a34-b48c-5bec10d38a83 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961116, 'name': ResetVM_Task, 'duration_secs': 0.100086} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 783.736155] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-005f0a31-7f0b-4a34-b48c-5bec10d38a83 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Did hard reboot of VM {{(pid=68217) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 783.739179] env[68217]: DEBUG nova.compute.manager [None req-005f0a31-7f0b-4a34-b48c-5bec10d38a83 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 783.739179] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d9c2018-5f2e-44cd-a40c-c9195a42d923 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.787747] env[68217]: DEBUG nova.network.neutron [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Successfully updated port: b687a815-30c5-4ac1-aed3-a25a04a96474 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 783.916070] env[68217]: DEBUG oslo_concurrency.lockutils [req-0236cf63-340f-4963-937e-a0bcd8d383f1 req-bf44b73f-5360-4bf8-bac9-3127a16f60ac service nova] Releasing lock "refresh_cache-95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 784.128396] env[68217]: DEBUG nova.compute.utils [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 784.135914] env[68217]: DEBUG nova.compute.manager [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 784.135914] env[68217]: DEBUG nova.network.neutron [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 784.213129] env[68217]: DEBUG nova.policy [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85faaafec6b64b64a4173f056aa7ac09', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bde4bb32b82948dd991d1fb8890c991b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 784.254056] env[68217]: DEBUG oslo_concurrency.lockutils [None req-005f0a31-7f0b-4a34-b48c-5bec10d38a83 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "693d6a74-a671-4d02-8798-cd3975507428" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.630s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 784.292093] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Acquiring lock "refresh_cache-e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 784.292248] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Acquired lock "refresh_cache-e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 784.292402] env[68217]: DEBUG nova.network.neutron [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 784.637663] env[68217]: DEBUG nova.compute.manager [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 784.773569] env[68217]: DEBUG nova.network.neutron [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Successfully created port: 0abd5109-c94f-4eba-b6b2-ca8b28794157 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 784.845638] env[68217]: DEBUG nova.network.neutron [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 785.110143] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-717f1830-8b69-4e3d-bf14-1fe428adb39f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.124515] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58580f5-3ffb-4dab-b557-77abc3c44fe7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.164140] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae54ca4-e038-4584-9b7d-97abe5e1f6d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.173731] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54ed27f9-e61f-4482-aee3-b901968959dc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.191144] env[68217]: DEBUG nova.compute.provider_tree [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 785.201785] env[68217]: DEBUG nova.network.neutron [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Updating instance_info_cache with network_info: [{"id": "b687a815-30c5-4ac1-aed3-a25a04a96474", "address": "fa:16:3e:03:cd:47", "network": {"id": "6ab1a831-f518-43fa-a556-16fb6aa83056", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-59133825-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b603f67ee5544169b93c24c6a1900acc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": 
"nsxv3"}}, "devname": "tapb687a815-30", "ovs_interfaceid": "b687a815-30c5-4ac1-aed3-a25a04a96474", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.314506] env[68217]: DEBUG oslo_concurrency.lockutils [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquiring lock "693d6a74-a671-4d02-8798-cd3975507428" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.314870] env[68217]: DEBUG oslo_concurrency.lockutils [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "693d6a74-a671-4d02-8798-cd3975507428" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.315022] env[68217]: DEBUG oslo_concurrency.lockutils [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquiring lock "693d6a74-a671-4d02-8798-cd3975507428-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 785.315140] env[68217]: DEBUG oslo_concurrency.lockutils [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "693d6a74-a671-4d02-8798-cd3975507428-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.315313] env[68217]: DEBUG oslo_concurrency.lockutils [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "693d6a74-a671-4d02-8798-cd3975507428-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.317323] env[68217]: INFO nova.compute.manager [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Terminating instance [ 785.669189] env[68217]: DEBUG nova.compute.manager [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 785.696204] env[68217]: DEBUG nova.scheduler.client.report [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 785.701310] env[68217]: DEBUG nova.virt.hardware [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 785.703357] env[68217]: DEBUG nova.virt.hardware [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 785.703569] env[68217]: DEBUG nova.virt.hardware [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 785.703770] env[68217]: DEBUG nova.virt.hardware [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 785.703916] env[68217]: DEBUG nova.virt.hardware [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 785.704071] env[68217]: DEBUG nova.virt.hardware [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 785.704868] env[68217]: DEBUG nova.virt.hardware [None 
req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 785.704868] env[68217]: DEBUG nova.virt.hardware [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 785.704868] env[68217]: DEBUG nova.virt.hardware [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 785.704868] env[68217]: DEBUG nova.virt.hardware [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 785.705081] env[68217]: DEBUG nova.virt.hardware [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 785.705543] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Releasing lock "refresh_cache-e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 785.705831] env[68217]: DEBUG nova.compute.manager [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Instance network_info: |[{"id": "b687a815-30c5-4ac1-aed3-a25a04a96474", "address": "fa:16:3e:03:cd:47", "network": {"id": "6ab1a831-f518-43fa-a556-16fb6aa83056", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-59133825-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b603f67ee5544169b93c24c6a1900acc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb687a815-30", "ovs_interfaceid": "b687a815-30c5-4ac1-aed3-a25a04a96474", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 785.706760] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee3279a0-8e22-4096-bc82-100a0d5ca319 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.714915] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.097s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 785.715419] env[68217]: DEBUG nova.compute.manager [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 785.718260] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:03:cd:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '298bb8ef-4765-494c-b157-7a349218bd1e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b687a815-30c5-4ac1-aed3-a25a04a96474', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 785.727067] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Creating folder: Project (b603f67ee5544169b93c24c6a1900acc). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 785.727397] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 25.681s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 785.727607] env[68217]: DEBUG nova.objects.instance [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Lazy-loading 'resources' on Instance uuid 71243775-e8df-4cc5-85c9-d64a244b4426 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 785.729977] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cfd525fb-9429-40d2-ba16-30a3d6280926 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.740303] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83cada68-b69e-4d11-8e6d-3ed66e039029 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.758026] env[68217]: DEBUG nova.compute.manager [req-fba0ad82-c36c-4533-82a1-3a0cf7cbf861 req-fba86522-70d4-470a-bdef-46eb363470e5 service nova] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Received event network-changed-b687a815-30c5-4ac1-aed3-a25a04a96474 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 785.758026] env[68217]: DEBUG nova.compute.manager [req-fba0ad82-c36c-4533-82a1-3a0cf7cbf861 req-fba86522-70d4-470a-bdef-46eb363470e5 service nova] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Refreshing instance network info cache due to event network-changed-b687a815-30c5-4ac1-aed3-a25a04a96474. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 785.758026] env[68217]: DEBUG oslo_concurrency.lockutils [req-fba0ad82-c36c-4533-82a1-3a0cf7cbf861 req-fba86522-70d4-470a-bdef-46eb363470e5 service nova] Acquiring lock "refresh_cache-e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 785.758026] env[68217]: DEBUG oslo_concurrency.lockutils [req-fba0ad82-c36c-4533-82a1-3a0cf7cbf861 req-fba86522-70d4-470a-bdef-46eb363470e5 service nova] Acquired lock "refresh_cache-e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 785.758026] env[68217]: DEBUG nova.network.neutron [req-fba0ad82-c36c-4533-82a1-3a0cf7cbf861 req-fba86522-70d4-470a-bdef-46eb363470e5 service nova] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Refreshing network info cache for port b687a815-30c5-4ac1-aed3-a25a04a96474 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 785.758351] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Created folder: Project (b603f67ee5544169b93c24c6a1900acc) in parent group-v594094. 
[ 785.758351] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Creating folder: Instances. Parent ref: group-v594243. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 785.758727] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12766045-f77d-4613-80c6-8c219458224d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.782203] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Created folder: Instances in parent group-v594243. [ 785.782484] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 785.782698] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 785.783205] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4dcb1d36-14af-499d-a564-f0f94543bf28 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.805347] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 785.805347] env[68217]: value = "task-2961120" [ 785.805347] env[68217]: _type = "Task" [ 785.805347] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.814010] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961120, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 785.820672] env[68217]: DEBUG nova.compute.manager [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 785.820929] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 785.821815] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9cb87c-926f-4009-ae72-d654422c986b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.831682] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 785.835020] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79af20a8-b889-47e1-a743-a8c70bec3368 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.840666] env[68217]: DEBUG oslo_vmware.api [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 785.840666] env[68217]: value = "task-2961121" [ 785.840666] env[68217]: _type = "Task" [ 785.840666] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 785.855234] env[68217]: DEBUG oslo_vmware.api [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961121, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.231359] env[68217]: DEBUG nova.compute.utils [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 786.236737] env[68217]: DEBUG nova.compute.manager [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 786.236976] env[68217]: DEBUG nova.network.neutron [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 786.322485] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961120, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.364366] env[68217]: DEBUG oslo_vmware.api [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961121, 'name': PowerOffVM_Task, 'duration_secs': 0.340202} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.369047] env[68217]: DEBUG nova.policy [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10c1ddac3d4946f88e9762a2bea8cfa9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '726c3dbb291b49b39db3ef87e35cdfbd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 786.370900] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 786.371268] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 786.371912] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1265b44-ecb4-4f71-9b35-c817d9d7fec4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.484025] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 786.484025] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 786.484025] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Deleting the datastore file [datastore2] 693d6a74-a671-4d02-8798-cd3975507428 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 786.484025] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-79801514-2c51-499a-b7f9-e87b4411a2d1 {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.496283] env[68217]: DEBUG oslo_vmware.api [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 786.496283] env[68217]: value = "task-2961123" [ 786.496283] env[68217]: _type = "Task" [ 786.496283] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.510879] env[68217]: DEBUG oslo_vmware.api [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961123, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.731032] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74d83af-66b1-4c4f-9b95-2ccb7bdd2f25 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.734299] env[68217]: DEBUG nova.compute.manager [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 786.743255] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bae23f3-e996-4d87-a46d-8ede0f14822b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.788767] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f4437f4-4708-4a6a-a808-1995ee0661cb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.799876] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b443a67-718b-4a36-8544-079f5d90efdd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.817377] env[68217]: DEBUG nova.compute.provider_tree [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 786.830696] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961120, 'name': CreateVM_Task, 'duration_secs': 0.633022} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 786.830696] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 786.830696] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.830696] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 786.830908] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 786.831134] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b39cf379-e2be-44df-a67b-4322a4c7fd49 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.837762] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Waiting for the task: (returnval){ [ 786.837762] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5216c11b-7bbb-1e09-961f-7031007b0a0b" [ 786.837762] env[68217]: _type = "Task" [ 786.837762] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 786.848998] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5216c11b-7bbb-1e09-961f-7031007b0a0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 786.923712] env[68217]: DEBUG nova.network.neutron [req-fba0ad82-c36c-4533-82a1-3a0cf7cbf861 req-fba86522-70d4-470a-bdef-46eb363470e5 service nova] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Updated VIF entry in instance network info cache for port b687a815-30c5-4ac1-aed3-a25a04a96474. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 786.923712] env[68217]: DEBUG nova.network.neutron [req-fba0ad82-c36c-4533-82a1-3a0cf7cbf861 req-fba86522-70d4-470a-bdef-46eb363470e5 service nova] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Updating instance_info_cache with network_info: [{"id": "b687a815-30c5-4ac1-aed3-a25a04a96474", "address": "fa:16:3e:03:cd:47", "network": {"id": "6ab1a831-f518-43fa-a556-16fb6aa83056", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-59133825-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b603f67ee5544169b93c24c6a1900acc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb687a815-30", "ovs_interfaceid": "b687a815-30c5-4ac1-aed3-a25a04a96474", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.012645] env[68217]: DEBUG oslo_vmware.api [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961123, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.365567} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.012997] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 787.013237] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 787.013442] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 787.013649] env[68217]: INFO nova.compute.manager [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Took 1.19 seconds to destroy the instance on the hypervisor. 
[ 787.013985] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 787.014144] env[68217]: DEBUG nova.compute.manager [-] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 787.014269] env[68217]: DEBUG nova.network.neutron [-] [instance: 693d6a74-a671-4d02-8798-cd3975507428] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 787.260806] env[68217]: DEBUG nova.network.neutron [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Successfully updated port: 0abd5109-c94f-4eba-b6b2-ca8b28794157 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 787.324365] env[68217]: DEBUG nova.scheduler.client.report [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 787.357449] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5216c11b-7bbb-1e09-961f-7031007b0a0b, 'name': SearchDatastore_Task, 'duration_secs': 0.01598} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.357449] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.357449] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 787.357449] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.359226] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.359226] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 787.359226] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8646ea65-1cc8-4bb2-9b1a-587d58167d0f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.370256] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 787.370256] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 787.370494] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2bfd0c3a-f5ea-4cbe-a181-e641245a2a7f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.381157] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Waiting for the task: (returnval){ [ 787.381157] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52dc3e87-8172-6231-9c0d-b8fd200b8f4d" [ 787.381157] env[68217]: _type = "Task" [ 787.381157] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.395576] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52dc3e87-8172-6231-9c0d-b8fd200b8f4d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.429063] env[68217]: DEBUG oslo_concurrency.lockutils [req-fba0ad82-c36c-4533-82a1-3a0cf7cbf861 req-fba86522-70d4-470a-bdef-46eb363470e5 service nova] Releasing lock "refresh_cache-e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.429063] env[68217]: DEBUG nova.compute.manager [req-fba0ad82-c36c-4533-82a1-3a0cf7cbf861 req-fba86522-70d4-470a-bdef-46eb363470e5 service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Received event network-changed-fd7f0c32-5631-4c5d-9e7a-12a133f76232 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 787.429063] env[68217]: DEBUG nova.compute.manager [req-fba0ad82-c36c-4533-82a1-3a0cf7cbf861 req-fba86522-70d4-470a-bdef-46eb363470e5 service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Refreshing instance network info cache due to event network-changed-fd7f0c32-5631-4c5d-9e7a-12a133f76232. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 787.429063] env[68217]: DEBUG oslo_concurrency.lockutils [req-fba0ad82-c36c-4533-82a1-3a0cf7cbf861 req-fba86522-70d4-470a-bdef-46eb363470e5 service nova] Acquiring lock "refresh_cache-693d6a74-a671-4d02-8798-cd3975507428" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.429063] env[68217]: DEBUG oslo_concurrency.lockutils [req-fba0ad82-c36c-4533-82a1-3a0cf7cbf861 req-fba86522-70d4-470a-bdef-46eb363470e5 service nova] Acquired lock "refresh_cache-693d6a74-a671-4d02-8798-cd3975507428" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.429314] env[68217]: DEBUG nova.network.neutron [req-fba0ad82-c36c-4533-82a1-3a0cf7cbf861 req-fba86522-70d4-470a-bdef-46eb363470e5 service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Refreshing network info cache for port fd7f0c32-5631-4c5d-9e7a-12a133f76232 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 787.565612] env[68217]: DEBUG nova.network.neutron [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Successfully created port: d1428ec3-01c4-4a36-9a5b-dba91c81f279 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 787.746803] env[68217]: DEBUG nova.compute.manager [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 787.763385] env[68217]: DEBUG oslo_concurrency.lockutils [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "refresh_cache-e550084b-84dd-4ae8-8667-2edb45b49e2b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.763676] env[68217]: DEBUG oslo_concurrency.lockutils [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquired lock "refresh_cache-e550084b-84dd-4ae8-8667-2edb45b49e2b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 787.764707] env[68217]: DEBUG nova.network.neutron [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 787.787333] env[68217]: DEBUG nova.virt.hardware [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 787.787851] env[68217]: DEBUG nova.virt.hardware [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 787.788062] env[68217]: DEBUG nova.virt.hardware [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 787.788304] env[68217]: DEBUG nova.virt.hardware [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 787.788520] env[68217]: DEBUG nova.virt.hardware [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 787.788728] env[68217]: DEBUG nova.virt.hardware [None 
req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 787.788991] env[68217]: DEBUG nova.virt.hardware [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 787.789202] env[68217]: DEBUG nova.virt.hardware [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 787.789408] env[68217]: DEBUG nova.virt.hardware [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 787.789633] env[68217]: DEBUG nova.virt.hardware [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 787.789848] env[68217]: DEBUG nova.virt.hardware [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 787.792118] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21efedb5-8b63-471c-9554-19ac2f138760 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.804481] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d56fc9d1-6116-471a-a4a8-e2f2fb493d19 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.830420] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.103s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 787.836188] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.727s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 787.840021] env[68217]: INFO nova.compute.claims [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 
tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 787.865424] env[68217]: INFO nova.scheduler.client.report [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Deleted allocations for instance 71243775-e8df-4cc5-85c9-d64a244b4426 [ 787.895246] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52dc3e87-8172-6231-9c0d-b8fd200b8f4d, 'name': SearchDatastore_Task, 'duration_secs': 0.023606} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.899025] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1431d02b-d6e6-41cf-b606-3ab5552d52eb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.909247] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Waiting for the task: (returnval){ [ 787.909247] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5224f015-c8ae-9749-0d9c-f6792bba4fcf" [ 787.909247] env[68217]: _type = "Task" [ 787.909247] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.926053] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5224f015-c8ae-9749-0d9c-f6792bba4fcf, 'name': SearchDatastore_Task, 'duration_secs': 0.011732} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 787.928076] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 787.928076] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2/e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 787.928076] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-12980c5d-2590-47f9-8200-a5ba88d481c3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.938611] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Waiting for the task: (returnval){ [ 787.938611] env[68217]: value = "task-2961124" [ 787.938611] env[68217]: _type = "Task" [ 787.938611] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 787.949717] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': task-2961124, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 787.975592] env[68217]: INFO nova.network.neutron [req-fba0ad82-c36c-4533-82a1-3a0cf7cbf861 req-fba86522-70d4-470a-bdef-46eb363470e5 service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Port fd7f0c32-5631-4c5d-9e7a-12a133f76232 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 787.975739] env[68217]: DEBUG nova.network.neutron [req-fba0ad82-c36c-4533-82a1-3a0cf7cbf861 req-fba86522-70d4-470a-bdef-46eb363470e5 service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.341676] env[68217]: DEBUG nova.network.neutron [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 788.367219] env[68217]: DEBUG nova.compute.manager [req-f355a276-41d1-412b-8e79-2edc5b6c439f req-d05ee7d6-cb64-4131-b5a8-17daa49a4775 service nova] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Received event network-vif-plugged-0abd5109-c94f-4eba-b6b2-ca8b28794157 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 788.367219] env[68217]: DEBUG oslo_concurrency.lockutils [req-f355a276-41d1-412b-8e79-2edc5b6c439f req-d05ee7d6-cb64-4131-b5a8-17daa49a4775 service nova] Acquiring lock "e550084b-84dd-4ae8-8667-2edb45b49e2b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.367219] env[68217]: DEBUG oslo_concurrency.lockutils [req-f355a276-41d1-412b-8e79-2edc5b6c439f req-d05ee7d6-cb64-4131-b5a8-17daa49a4775 service nova] Lock "e550084b-84dd-4ae8-8667-2edb45b49e2b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.367219] env[68217]: DEBUG oslo_concurrency.lockutils [req-f355a276-41d1-412b-8e79-2edc5b6c439f req-d05ee7d6-cb64-4131-b5a8-17daa49a4775 service nova] Lock "e550084b-84dd-4ae8-8667-2edb45b49e2b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.367219] env[68217]: DEBUG nova.compute.manager [req-f355a276-41d1-412b-8e79-2edc5b6c439f req-d05ee7d6-cb64-4131-b5a8-17daa49a4775 service nova] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] No waiting events found dispatching network-vif-plugged-0abd5109-c94f-4eba-b6b2-ca8b28794157 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 788.367514] env[68217]: WARNING nova.compute.manager [req-f355a276-41d1-412b-8e79-2edc5b6c439f req-d05ee7d6-cb64-4131-b5a8-17daa49a4775 service nova] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Received unexpected event network-vif-plugged-0abd5109-c94f-4eba-b6b2-ca8b28794157 for instance with vm_state building and task_state spawning. [ 788.367514] env[68217]: DEBUG nova.compute.manager [req-f355a276-41d1-412b-8e79-2edc5b6c439f req-d05ee7d6-cb64-4131-b5a8-17daa49a4775 service nova] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Received event network-changed-0abd5109-c94f-4eba-b6b2-ca8b28794157 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 788.367514] env[68217]: DEBUG nova.compute.manager [req-f355a276-41d1-412b-8e79-2edc5b6c439f req-d05ee7d6-cb64-4131-b5a8-17daa49a4775 service nova] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Refreshing instance network info cache due to event network-changed-0abd5109-c94f-4eba-b6b2-ca8b28794157. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 788.367763] env[68217]: DEBUG oslo_concurrency.lockutils [req-f355a276-41d1-412b-8e79-2edc5b6c439f req-d05ee7d6-cb64-4131-b5a8-17daa49a4775 service nova] Acquiring lock "refresh_cache-e550084b-84dd-4ae8-8667-2edb45b49e2b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 788.373990] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7076bbc4-ac76-400a-9390-766de7cca0a6 tempest-ServerGroupTestJSON-126373622 tempest-ServerGroupTestJSON-126373622-project-member] Lock "71243775-e8df-4cc5-85c9-d64a244b4426" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 32.521s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 788.455498] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': task-2961124, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510294} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.456034] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2/e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 788.456403] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 788.456768] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2f287f30-dcdb-49b2-afd4-a7f231bf154c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.464421] env[68217]: DEBUG nova.network.neutron [-] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.468469] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Waiting for the task: (returnval){ [ 788.468469] env[68217]: value = "task-2961125" [ 788.468469] env[68217]: _type = "Task" [ 788.468469] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 788.480385] env[68217]: DEBUG oslo_concurrency.lockutils [req-fba0ad82-c36c-4533-82a1-3a0cf7cbf861 req-fba86522-70d4-470a-bdef-46eb363470e5 service nova] Releasing lock "refresh_cache-693d6a74-a671-4d02-8798-cd3975507428" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 788.483604] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': task-2961125, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 788.667039] env[68217]: DEBUG nova.network.neutron [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Updating instance_info_cache with network_info: [{"id": "0abd5109-c94f-4eba-b6b2-ca8b28794157", "address": "fa:16:3e:7f:0a:36", "network": {"id": "9b382d3b-5356-4cee-b6ae-e9a825915fe9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-894310318-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bde4bb32b82948dd991d1fb8890c991b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4162774e-ec80-4d85-aeb4-fae77f197393", "external-id": "nsx-vlan-transportzone-542", "segmentation_id": 542, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0abd5109-c9", "ovs_interfaceid": "0abd5109-c94f-4eba-b6b2-ca8b28794157", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.856939] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Acquiring lock "95e625e9-a726-4c3c-be66-7b8ce93b5f8a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 788.857305] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Lock "95e625e9-a726-4c3c-be66-7b8ce93b5f8a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 788.967971] env[68217]: INFO nova.compute.manager [-] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Took 1.95 seconds to deallocate network for instance. 
[ 788.986415] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': task-2961125, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066722} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 788.986796] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 788.987798] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-598e493f-7d21-4332-a050-122d4be899d8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.022996] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Reconfiguring VM instance instance-00000032 to attach disk [datastore2] e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2/e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 789.026468] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76a3810f-1f8f-4342-84ea-4b8843b55ed3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.050146] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Waiting for the task: (returnval){ [ 789.050146] env[68217]: value = "task-2961126" [ 789.050146] env[68217]: _type = "Task" [ 789.050146] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.061936] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': task-2961126, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.170965] env[68217]: DEBUG oslo_concurrency.lockutils [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Releasing lock "refresh_cache-e550084b-84dd-4ae8-8667-2edb45b49e2b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 789.171313] env[68217]: DEBUG nova.compute.manager [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Instance network_info: |[{"id": "0abd5109-c94f-4eba-b6b2-ca8b28794157", "address": "fa:16:3e:7f:0a:36", "network": {"id": "9b382d3b-5356-4cee-b6ae-e9a825915fe9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-894310318-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bde4bb32b82948dd991d1fb8890c991b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4162774e-ec80-4d85-aeb4-fae77f197393", "external-id": "nsx-vlan-transportzone-542", "segmentation_id": 542, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0abd5109-c9", "ovs_interfaceid": "0abd5109-c94f-4eba-b6b2-ca8b28794157", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 789.174018] env[68217]: DEBUG oslo_concurrency.lockutils [req-f355a276-41d1-412b-8e79-2edc5b6c439f req-d05ee7d6-cb64-4131-b5a8-17daa49a4775 service nova] Acquired lock "refresh_cache-e550084b-84dd-4ae8-8667-2edb45b49e2b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 789.174018] env[68217]: DEBUG nova.network.neutron [req-f355a276-41d1-412b-8e79-2edc5b6c439f req-d05ee7d6-cb64-4131-b5a8-17daa49a4775 service nova] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Refreshing network info cache for port 0abd5109-c94f-4eba-b6b2-ca8b28794157 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 789.174018] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7f:0a:36', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4162774e-ec80-4d85-aeb4-fae77f197393', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0abd5109-c94f-4eba-b6b2-ca8b28794157', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 789.180763] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Creating folder: Project 
(bde4bb32b82948dd991d1fb8890c991b). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 789.182088] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-081392d3-4a8d-4477-bd47-887f01b033bd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.200687] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Created folder: Project (bde4bb32b82948dd991d1fb8890c991b) in parent group-v594094. [ 789.200947] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Creating folder: Instances. Parent ref: group-v594246. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 789.201364] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f75d07e3-e41b-4555-aa4c-aa3fdc1bcb4d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.217931] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Created folder: Instances in parent group-v594246. [ 789.218231] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 789.221238] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 789.222008] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-531f827a-44f7-44de-a500-6d56b53856c2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.245418] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 789.245418] env[68217]: value = "task-2961129" [ 789.245418] env[68217]: _type = "Task" [ 789.245418] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.254174] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961129, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.335801] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c6b3972-7563-4703-8271-22b81a76c992 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.346854] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b37aaa94-84a9-4543-aa5c-a46b2c5ec490 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.382687] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2acfb44c-6775-4415-9de0-0d7582130687 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.390079] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ddcecdf-926f-4dd5-b7d5-831b089f3a6f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.406879] env[68217]: DEBUG nova.compute.provider_tree [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 789.482114] env[68217]: DEBUG oslo_concurrency.lockutils [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 789.564232] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': task-2961126, 'name': ReconfigVM_Task, 'duration_secs': 0.300316} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 789.564542] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Reconfigured VM instance instance-00000032 to attach disk [datastore2] e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2/e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 789.565203] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-11ca6474-870c-48e7-ac2e-fbaa155b23fb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.573291] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Waiting for the task: (returnval){ [ 789.573291] env[68217]: value = "task-2961130" [ 789.573291] env[68217]: _type = "Task" [ 789.573291] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 789.589538] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': task-2961130, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.773648] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961129, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 789.912090] env[68217]: DEBUG nova.scheduler.client.report [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 790.063351] env[68217]: DEBUG nova.network.neutron [req-f355a276-41d1-412b-8e79-2edc5b6c439f req-d05ee7d6-cb64-4131-b5a8-17daa49a4775 service nova] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Updated VIF entry in instance network info cache for port 0abd5109-c94f-4eba-b6b2-ca8b28794157. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 790.063686] env[68217]: DEBUG nova.network.neutron [req-f355a276-41d1-412b-8e79-2edc5b6c439f req-d05ee7d6-cb64-4131-b5a8-17daa49a4775 service nova] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Updating instance_info_cache with network_info: [{"id": "0abd5109-c94f-4eba-b6b2-ca8b28794157", "address": "fa:16:3e:7f:0a:36", "network": {"id": "9b382d3b-5356-4cee-b6ae-e9a825915fe9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-894310318-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bde4bb32b82948dd991d1fb8890c991b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4162774e-ec80-4d85-aeb4-fae77f197393", "external-id": "nsx-vlan-transportzone-542", "segmentation_id": 542, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0abd5109-c9", "ovs_interfaceid": "0abd5109-c94f-4eba-b6b2-ca8b28794157", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 790.088230] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': task-2961130, 'name': 
Rename_Task, 'duration_secs': 0.172485} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.088500] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 790.088743] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8ede2af-5094-4e5f-9d37-b3a4fe6da4d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.097350] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Waiting for the task: (returnval){ [ 790.097350] env[68217]: value = "task-2961131" [ 790.097350] env[68217]: _type = "Task" [ 790.097350] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.111665] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': task-2961131, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.145896] env[68217]: DEBUG nova.network.neutron [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Successfully updated port: d1428ec3-01c4-4a36-9a5b-dba91c81f279 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 790.258473] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961129, 'name': CreateVM_Task, 'duration_secs': 0.641577} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.258473] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 790.260067] env[68217]: DEBUG oslo_concurrency.lockutils [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.260067] env[68217]: DEBUG oslo_concurrency.lockutils [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.260067] env[68217]: DEBUG oslo_concurrency.lockutils [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 790.260067] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-20a8d2e5-0b89-416a-8056-7104064b4a3d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.266463] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 790.266463] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]520bdb77-1f9a-901b-fe80-c4f851d86418" [ 790.266463] env[68217]: _type = "Task" [ 790.266463] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.280012] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]520bdb77-1f9a-901b-fe80-c4f851d86418, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.407437] env[68217]: DEBUG nova.compute.manager [req-2fc086b6-30fd-4a1a-acd1-c42e626b6fa8 req-26b04648-1228-4ffa-976a-b49427007aff service nova] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Received event network-vif-deleted-fd7f0c32-5631-4c5d-9e7a-12a133f76232 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 790.411126] env[68217]: DEBUG nova.compute.manager [req-2fc086b6-30fd-4a1a-acd1-c42e626b6fa8 req-26b04648-1228-4ffa-976a-b49427007aff service nova] [instance: bd62c682-24f2-4559-887a-03186409f699] Received event network-vif-plugged-d1428ec3-01c4-4a36-9a5b-dba91c81f279 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 790.411126] env[68217]: DEBUG oslo_concurrency.lockutils [req-2fc086b6-30fd-4a1a-acd1-c42e626b6fa8 req-26b04648-1228-4ffa-976a-b49427007aff service nova] Acquiring lock "bd62c682-24f2-4559-887a-03186409f699-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 790.411126] env[68217]: DEBUG oslo_concurrency.lockutils [req-2fc086b6-30fd-4a1a-acd1-c42e626b6fa8 req-26b04648-1228-4ffa-976a-b49427007aff service nova] Lock "bd62c682-24f2-4559-887a-03186409f699-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.411126] env[68217]: DEBUG oslo_concurrency.lockutils [req-2fc086b6-30fd-4a1a-acd1-c42e626b6fa8 req-26b04648-1228-4ffa-976a-b49427007aff service nova] Lock "bd62c682-24f2-4559-887a-03186409f699-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.411126] env[68217]: DEBUG nova.compute.manager [req-2fc086b6-30fd-4a1a-acd1-c42e626b6fa8 req-26b04648-1228-4ffa-976a-b49427007aff service nova] [instance: bd62c682-24f2-4559-887a-03186409f699] No waiting events found dispatching network-vif-plugged-d1428ec3-01c4-4a36-9a5b-dba91c81f279 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 790.411478] env[68217]: WARNING nova.compute.manager [req-2fc086b6-30fd-4a1a-acd1-c42e626b6fa8 req-26b04648-1228-4ffa-976a-b49427007aff service nova] [instance: bd62c682-24f2-4559-887a-03186409f699] Received unexpected event network-vif-plugged-d1428ec3-01c4-4a36-9a5b-dba91c81f279 for instance with vm_state building and task_state spawning. [ 790.411478] env[68217]: DEBUG nova.compute.manager [req-2fc086b6-30fd-4a1a-acd1-c42e626b6fa8 req-26b04648-1228-4ffa-976a-b49427007aff service nova] [instance: bd62c682-24f2-4559-887a-03186409f699] Received event network-changed-d1428ec3-01c4-4a36-9a5b-dba91c81f279 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 790.411478] env[68217]: DEBUG nova.compute.manager [req-2fc086b6-30fd-4a1a-acd1-c42e626b6fa8 req-26b04648-1228-4ffa-976a-b49427007aff service nova] [instance: bd62c682-24f2-4559-887a-03186409f699] Refreshing instance network info cache due to event network-changed-d1428ec3-01c4-4a36-9a5b-dba91c81f279. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 790.411478] env[68217]: DEBUG oslo_concurrency.lockutils [req-2fc086b6-30fd-4a1a-acd1-c42e626b6fa8 req-26b04648-1228-4ffa-976a-b49427007aff service nova] Acquiring lock "refresh_cache-bd62c682-24f2-4559-887a-03186409f699" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.411478] env[68217]: DEBUG oslo_concurrency.lockutils [req-2fc086b6-30fd-4a1a-acd1-c42e626b6fa8 req-26b04648-1228-4ffa-976a-b49427007aff service nova] Acquired lock "refresh_cache-bd62c682-24f2-4559-887a-03186409f699" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.411647] env[68217]: DEBUG nova.network.neutron [req-2fc086b6-30fd-4a1a-acd1-c42e626b6fa8 req-26b04648-1228-4ffa-976a-b49427007aff service nova] [instance: bd62c682-24f2-4559-887a-03186409f699] Refreshing network info cache for port d1428ec3-01c4-4a36-9a5b-dba91c81f279 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 790.416572] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.581s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 790.417078] env[68217]: DEBUG nova.compute.manager [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 790.420737] env[68217]: DEBUG oslo_concurrency.lockutils [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.728s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 790.422177] env[68217]: INFO nova.compute.claims [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 790.567748] env[68217]: DEBUG oslo_concurrency.lockutils [req-f355a276-41d1-412b-8e79-2edc5b6c439f req-d05ee7d6-cb64-4131-b5a8-17daa49a4775 service nova] Releasing lock "refresh_cache-e550084b-84dd-4ae8-8667-2edb45b49e2b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.619983] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': task-2961131, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.646045] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "refresh_cache-bd62c682-24f2-4559-887a-03186409f699" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.780556] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]520bdb77-1f9a-901b-fe80-c4f851d86418, 'name': SearchDatastore_Task, 'duration_secs': 0.015407} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 790.780671] env[68217]: DEBUG oslo_concurrency.lockutils [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 790.781412] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 790.781412] env[68217]: DEBUG oslo_concurrency.lockutils [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.781529] env[68217]: DEBUG oslo_concurrency.lockutils [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 790.781661] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 790.781958] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2335f7ed-7f46-4451-a18d-e1cc53703155 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.793257] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 790.793458] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 790.795805] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e3df7f9c-45a8-4bef-bbf7-4f4613a1ec94 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 790.802699] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 790.802699] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f68e72-6bf3-1346-b8f1-cdb7912559b7" [ 790.802699] env[68217]: _type = "Task" [ 790.802699] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 790.812942] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f68e72-6bf3-1346-b8f1-cdb7912559b7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 790.928312] env[68217]: DEBUG nova.compute.utils [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 790.936477] env[68217]: DEBUG nova.compute.manager [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 790.936477] env[68217]: DEBUG nova.network.neutron [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 790.987716] env[68217]: DEBUG nova.network.neutron [req-2fc086b6-30fd-4a1a-acd1-c42e626b6fa8 req-26b04648-1228-4ffa-976a-b49427007aff service nova] [instance: bd62c682-24f2-4559-887a-03186409f699] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 791.041612] env[68217]: DEBUG nova.policy [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36276be4c67c4abfa0941293d4cc800b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ebfeb38b81794c558c1164cecd7fa221', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 791.066204] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "aa4b9cc8-d0dc-4a0b-9eec-dceace695df9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.066485] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "aa4b9cc8-d0dc-4a0b-9eec-dceace695df9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.110939] env[68217]: DEBUG oslo_vmware.api [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': task-2961131, 'name': PowerOnVM_Task, 'duration_secs': 0.559023} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.111221] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 791.111422] env[68217]: INFO nova.compute.manager [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Took 8.04 seconds to spawn the instance on the hypervisor. 
[ 791.112549] env[68217]: DEBUG nova.compute.manager [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 791.113143] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a12a3d7a-b905-4195-bbd7-9ec0ab226414 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.116737] env[68217]: DEBUG nova.network.neutron [req-2fc086b6-30fd-4a1a-acd1-c42e626b6fa8 req-26b04648-1228-4ffa-976a-b49427007aff service nova] [instance: bd62c682-24f2-4559-887a-03186409f699] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 791.329847] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f68e72-6bf3-1346-b8f1-cdb7912559b7, 'name': SearchDatastore_Task, 'duration_secs': 0.012127} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.331036] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86d7adf9-b2eb-431a-bb07-699bf58d4927 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.347706] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 791.347706] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fa6415-73ca-ed65-a4ab-ba533e224e5a" [ 791.347706] env[68217]: _type = "Task" [ 791.347706] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.363112] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fa6415-73ca-ed65-a4ab-ba533e224e5a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.435289] env[68217]: DEBUG nova.compute.manager [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 791.535661] env[68217]: DEBUG nova.network.neutron [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Successfully created port: e5987781-918b-4d17-8151-7b4661f8b9d3 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 791.548250] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 791.548498] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 791.621380] env[68217]: DEBUG oslo_concurrency.lockutils [req-2fc086b6-30fd-4a1a-acd1-c42e626b6fa8 req-26b04648-1228-4ffa-976a-b49427007aff service nova] Releasing lock "refresh_cache-bd62c682-24f2-4559-887a-03186409f699" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.622382] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquired lock "refresh_cache-bd62c682-24f2-4559-887a-03186409f699" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 791.622551] env[68217]: DEBUG nova.network.neutron [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 791.633362] env[68217]: INFO nova.compute.manager [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Took 39.64 seconds to build instance. [ 791.860220] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fa6415-73ca-ed65-a4ab-ba533e224e5a, 'name': SearchDatastore_Task, 'duration_secs': 0.014217} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 791.863090] env[68217]: DEBUG oslo_concurrency.lockutils [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 791.863499] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] e550084b-84dd-4ae8-8667-2edb45b49e2b/e550084b-84dd-4ae8-8667-2edb45b49e2b.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 791.864082] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e1182f22-3140-4efe-b2e8-2d181f7016f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.874030] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 791.874030] env[68217]: value = "task-2961132" [ 791.874030] env[68217]: _type = "Task" [ 791.874030] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 791.879713] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e78cc2dc-ff56-4aea-9b2e-004194abe3d0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.890586] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48b03242-40a4-400e-bbd8-04224094bee1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.893915] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961132, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 791.923371] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f072052-74cb-4ca2-ad25-51299d6bb0ae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.931833] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6167234-143a-4614-ac20-c5e18b6d7325 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.949843] env[68217]: DEBUG nova.compute.provider_tree [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 792.134993] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4cb662e9-df5e-4174-b6a2-70504a2a6a01 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Lock "e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.193s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.164279] env[68217]: DEBUG nova.network.neutron [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 792.342100] env[68217]: DEBUG nova.network.neutron [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Updating instance_info_cache with network_info: [{"id": "d1428ec3-01c4-4a36-9a5b-dba91c81f279", "address": "fa:16:3e:0c:de:50", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.237", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1428ec3-01", "ovs_interfaceid": "d1428ec3-01c4-4a36-9a5b-dba91c81f279", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.385359] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961132, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.453390] env[68217]: DEBUG nova.compute.manager [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 792.456190] env[68217]: DEBUG nova.scheduler.client.report [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 792.481325] env[68217]: DEBUG nova.virt.hardware [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 792.481624] env[68217]: DEBUG nova.virt.hardware [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 792.481787] env[68217]: DEBUG nova.virt.hardware [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 792.481977] env[68217]: DEBUG nova.virt.hardware [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 792.482137] env[68217]: DEBUG nova.virt.hardware [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 792.482281] env[68217]: DEBUG nova.virt.hardware [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 792.482494] env[68217]: DEBUG nova.virt.hardware [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 
tempest-ImagesTestJSON-1122737556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 792.482653] env[68217]: DEBUG nova.virt.hardware [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 792.482895] env[68217]: DEBUG nova.virt.hardware [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 792.483047] env[68217]: DEBUG nova.virt.hardware [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 792.483253] env[68217]: DEBUG nova.virt.hardware [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 792.484402] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51444ad7-921b-422b-983a-2aeebfb7a43e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.493412] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8451a8db-8f4b-4fbe-91d3-ab5a11ee1259 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.638049] env[68217]: DEBUG nova.compute.manager [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 792.844481] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Releasing lock "refresh_cache-bd62c682-24f2-4559-887a-03186409f699" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 792.844837] env[68217]: DEBUG nova.compute.manager [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Instance network_info: |[{"id": "d1428ec3-01c4-4a36-9a5b-dba91c81f279", "address": "fa:16:3e:0c:de:50", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.237", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1428ec3-01", "ovs_interfaceid": "d1428ec3-01c4-4a36-9a5b-dba91c81f279", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 792.845298] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:de:50', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd1428ec3-01c4-4a36-9a5b-dba91c81f279', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 792.852989] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Creating folder: Project (726c3dbb291b49b39db3ef87e35cdfbd). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 792.853314] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3920027-f4a4-48a4-8ce8-2b1c78150e5a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.869172] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Created folder: Project (726c3dbb291b49b39db3ef87e35cdfbd) in parent group-v594094. 
[ 792.869436] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Creating folder: Instances. Parent ref: group-v594249. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 792.869813] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-44302a8f-11a9-47c0-9585-92055515f46b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.883409] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Created folder: Instances in parent group-v594249. [ 792.883816] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 792.884058] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd62c682-24f2-4559-887a-03186409f699] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 792.887306] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d3f35381-2ada-483c-aa98-bd4cf4bd47fd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 792.904198] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961132, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.908243] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 792.908243] env[68217]: value = "task-2961135" [ 792.908243] env[68217]: _type = "Task" [ 792.908243] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 792.916957] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961135, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 792.960821] env[68217]: DEBUG oslo_concurrency.lockutils [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.540s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 792.961562] env[68217]: DEBUG nova.compute.manager [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 792.965402] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.656s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 792.971957] env[68217]: INFO nova.compute.claims [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 793.174334] env[68217]: DEBUG oslo_concurrency.lockutils [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 793.387036] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961132, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.020598} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.387326] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] e550084b-84dd-4ae8-8667-2edb45b49e2b/e550084b-84dd-4ae8-8667-2edb45b49e2b.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 793.387630] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 793.387744] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffefc15c-5cfa-4807-84dc-49a3802240f9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.396984] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 793.396984] env[68217]: value = "task-2961136" [ 793.396984] env[68217]: _type = "Task" [ 793.396984] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.408582] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961136, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.419106] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961135, 'name': CreateVM_Task, 'duration_secs': 0.369999} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.419258] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd62c682-24f2-4559-887a-03186409f699] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 793.419958] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.420136] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.420453] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 793.420713] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-de806d87-08c5-415e-93cd-25a587091dc5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.426271] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 793.426271] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]528c6bb9-b075-6744-8154-8b5076b9e8e3" [ 793.426271] env[68217]: _type = "Task" [ 793.426271] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.435333] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528c6bb9-b075-6744-8154-8b5076b9e8e3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.479011] env[68217]: DEBUG nova.compute.utils [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 793.482393] env[68217]: DEBUG nova.compute.manager [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 793.482621] env[68217]: DEBUG nova.network.neutron [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 793.642199] env[68217]: DEBUG nova.policy [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '554b6b3d22404c0ba52c739b3c7b98a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8fb62d18446841a3b2a6ac25ab5dc869', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 793.861542] env[68217]: DEBUG nova.compute.manager [req-df78efb2-5665-4072-aa21-b0019e5d0001 req-d2573328-3272-4470-9057-de572693f7fb service nova] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Received event network-changed-b687a815-30c5-4ac1-aed3-a25a04a96474 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 793.861715] env[68217]: DEBUG nova.compute.manager [req-df78efb2-5665-4072-aa21-b0019e5d0001 req-d2573328-3272-4470-9057-de572693f7fb service nova] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Refreshing instance network info cache due to event network-changed-b687a815-30c5-4ac1-aed3-a25a04a96474. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 793.862576] env[68217]: DEBUG oslo_concurrency.lockutils [req-df78efb2-5665-4072-aa21-b0019e5d0001 req-d2573328-3272-4470-9057-de572693f7fb service nova] Acquiring lock "refresh_cache-e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.862576] env[68217]: DEBUG oslo_concurrency.lockutils [req-df78efb2-5665-4072-aa21-b0019e5d0001 req-d2573328-3272-4470-9057-de572693f7fb service nova] Acquired lock "refresh_cache-e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.863178] env[68217]: DEBUG nova.network.neutron [req-df78efb2-5665-4072-aa21-b0019e5d0001 req-d2573328-3272-4470-9057-de572693f7fb service nova] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Refreshing network info cache for port b687a815-30c5-4ac1-aed3-a25a04a96474 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 793.910925] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961136, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068121} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.911935] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 793.913419] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b369faee-5cbe-40c9-945f-39c7c9dde302 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.918239] env[68217]: DEBUG nova.network.neutron [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Successfully updated port: e5987781-918b-4d17-8151-7b4661f8b9d3 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 793.939385] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] e550084b-84dd-4ae8-8667-2edb45b49e2b/e550084b-84dd-4ae8-8667-2edb45b49e2b.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 793.940759] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "refresh_cache-41d279f2-477b-44b2-9eb9-7b782c9c890f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.940896] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 
tempest-ImagesTestJSON-1122737556-project-member] Acquired lock "refresh_cache-41d279f2-477b-44b2-9eb9-7b782c9c890f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.941121] env[68217]: DEBUG nova.network.neutron [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 793.945454] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45c44d41-8ab1-4999-b9f9-f6ee97d5a60b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.973031] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528c6bb9-b075-6744-8154-8b5076b9e8e3, 'name': SearchDatastore_Task, 'duration_secs': 0.009338} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 793.974353] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 793.974600] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 793.974830] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 793.974977] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 793.975171] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 793.975493] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 
793.975493] env[68217]: value = "task-2961137" [ 793.975493] env[68217]: _type = "Task" [ 793.975493] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 793.975983] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6bfddd28-65d6-4141-954c-e029cd6fa196 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.983469] env[68217]: DEBUG nova.compute.manager [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 793.995998] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961137, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 793.999622] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 794.000170] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 794.000601] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7aa97820-bbba-4512-92f0-afe8b2094378 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.006361] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 794.006361] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5269fa62-a1ec-1391-0677-5c94a7708085" [ 794.006361] env[68217]: _type = "Task" [ 794.006361] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.015020] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5269fa62-a1ec-1391-0677-5c94a7708085, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.043451] env[68217]: DEBUG nova.network.neutron [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 794.337662] env[68217]: DEBUG nova.network.neutron [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Updating instance_info_cache with network_info: [{"id": "e5987781-918b-4d17-8151-7b4661f8b9d3", "address": "fa:16:3e:2a:6b:74", "network": {"id": "9e8fc690-e99b-4d81-a587-00584139fd8d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1676314619-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebfeb38b81794c558c1164cecd7fa221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5987781-91", "ovs_interfaceid": "e5987781-918b-4d17-8151-7b4661f8b9d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.342699] env[68217]: DEBUG nova.network.neutron [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Successfully created port: e14e7381-56d1-46d7-ac1f-e49cea827394 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 794.387235] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa02ddf6-6270-4554-b3ba-377949f37786 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.397039] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7130f0e-4691-441b-be82-2165f87c1b89 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.433915] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-313b9f47-77cd-40c5-84a6-3ed4280edf66 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.442597] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65a14a36-ca7f-4375-9a6b-8dab575b0a2b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.458340] env[68217]: DEBUG nova.compute.provider_tree [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 794.492032] env[68217]: DEBUG oslo_vmware.api [None 
req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961137, 'name': ReconfigVM_Task, 'duration_secs': 0.30922} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.492032] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Reconfigured VM instance instance-00000033 to attach disk [datastore2] e550084b-84dd-4ae8-8667-2edb45b49e2b/e550084b-84dd-4ae8-8667-2edb45b49e2b.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 794.492032] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d2b04b88-d2fc-4469-a80b-1d4457de2647 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.498559] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 794.498559] env[68217]: value = "task-2961138" [ 794.498559] env[68217]: _type = "Task" [ 794.498559] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.511537] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961138, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.519227] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5269fa62-a1ec-1391-0677-5c94a7708085, 'name': SearchDatastore_Task, 'duration_secs': 0.018109} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 794.520080] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6203c9d7-73c6-49f1-8742-e12133f3abf6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.526404] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 794.526404] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527761e4-4de7-173e-fb02-34a757d0ee31" [ 794.526404] env[68217]: _type = "Task" [ 794.526404] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.535609] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527761e4-4de7-173e-fb02-34a757d0ee31, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.814418] env[68217]: DEBUG nova.network.neutron [req-df78efb2-5665-4072-aa21-b0019e5d0001 req-d2573328-3272-4470-9057-de572693f7fb service nova] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Updated VIF entry in instance network info cache for port b687a815-30c5-4ac1-aed3-a25a04a96474. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 794.814769] env[68217]: DEBUG nova.network.neutron [req-df78efb2-5665-4072-aa21-b0019e5d0001 req-d2573328-3272-4470-9057-de572693f7fb service nova] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Updating instance_info_cache with network_info: [{"id": "b687a815-30c5-4ac1-aed3-a25a04a96474", "address": "fa:16:3e:03:cd:47", "network": {"id": "6ab1a831-f518-43fa-a556-16fb6aa83056", "bridge": "br-int", "label": "tempest-ServersV294TestFqdnHostnames-59133825-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.225", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b603f67ee5544169b93c24c6a1900acc", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "298bb8ef-4765-494c-b157-7a349218bd1e", "external-id": "nsx-vlan-transportzone-905", "segmentation_id": 905, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb687a815-30", "ovs_interfaceid": "b687a815-30c5-4ac1-aed3-a25a04a96474", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.845649] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Releasing lock "refresh_cache-41d279f2-477b-44b2-9eb9-7b782c9c890f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 794.846058] env[68217]: DEBUG nova.compute.manager [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Instance network_info: |[{"id": "e5987781-918b-4d17-8151-7b4661f8b9d3", "address": "fa:16:3e:2a:6b:74", "network": {"id": "9e8fc690-e99b-4d81-a587-00584139fd8d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1676314619-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebfeb38b81794c558c1164cecd7fa221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": 
"nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5987781-91", "ovs_interfaceid": "e5987781-918b-4d17-8151-7b4661f8b9d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 794.846363] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:6b:74', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5a629f-6902-4d30-9278-74b443a8371d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e5987781-918b-4d17-8151-7b4661f8b9d3', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 794.853849] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 794.854085] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 794.854355] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ed9268f3-0159-4a7b-9652-7c99f4862dd5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 794.879019] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 794.879019] env[68217]: value = "task-2961139" [ 794.879019] env[68217]: _type = "Task" [ 794.879019] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 794.885421] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961139, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 794.960831] env[68217]: DEBUG nova.scheduler.client.report [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 795.001178] env[68217]: DEBUG nova.compute.manager [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 795.012979] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961138, 'name': Rename_Task, 'duration_secs': 0.154221} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.013531] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 795.013846] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e38aa8c-afd3-453f-b3fa-c0051716c50d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.022179] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 795.022179] env[68217]: value = "task-2961140" [ 795.022179] env[68217]: _type = "Task" [ 795.022179] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.027596] env[68217]: DEBUG nova.virt.hardware [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 795.027837] env[68217]: DEBUG nova.virt.hardware [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 795.027972] env[68217]: DEBUG nova.virt.hardware [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 795.028172] env[68217]: DEBUG nova.virt.hardware [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
795.028317] env[68217]: DEBUG nova.virt.hardware [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 795.028460] env[68217]: DEBUG nova.virt.hardware [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 795.028672] env[68217]: DEBUG nova.virt.hardware [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 795.028826] env[68217]: DEBUG nova.virt.hardware [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 795.028991] env[68217]: DEBUG nova.virt.hardware [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 795.029168] env[68217]: DEBUG nova.virt.hardware [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 795.029391] env[68217]: DEBUG nova.virt.hardware [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 795.030248] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1064886b-e58e-4781-a962-a73b69292da7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.038843] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961140, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.047231] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f0358d3-d546-46eb-8d48-09511e75e07b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.051417] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527761e4-4de7-173e-fb02-34a757d0ee31, 'name': SearchDatastore_Task, 'duration_secs': 0.019342} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.051613] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.051922] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] bd62c682-24f2-4559-887a-03186409f699/bd62c682-24f2-4559-887a-03186409f699.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 795.052632] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51837cea-15ff-4d9d-ba48-128388ff1652 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.070384] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 795.070384] env[68217]: value = "task-2961141" [ 795.070384] env[68217]: _type = "Task" [ 795.070384] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.078952] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961141, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.317836] env[68217]: DEBUG oslo_concurrency.lockutils [req-df78efb2-5665-4072-aa21-b0019e5d0001 req-d2573328-3272-4470-9057-de572693f7fb service nova] Releasing lock "refresh_cache-e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.318206] env[68217]: DEBUG nova.compute.manager [req-df78efb2-5665-4072-aa21-b0019e5d0001 req-d2573328-3272-4470-9057-de572693f7fb service nova] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Received event network-vif-plugged-e5987781-918b-4d17-8151-7b4661f8b9d3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 795.318469] env[68217]: DEBUG oslo_concurrency.lockutils [req-df78efb2-5665-4072-aa21-b0019e5d0001 req-d2573328-3272-4470-9057-de572693f7fb service nova] Acquiring lock "41d279f2-477b-44b2-9eb9-7b782c9c890f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 795.318788] env[68217]: DEBUG oslo_concurrency.lockutils [req-df78efb2-5665-4072-aa21-b0019e5d0001 req-d2573328-3272-4470-9057-de572693f7fb service nova] Lock "41d279f2-477b-44b2-9eb9-7b782c9c890f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.319038] env[68217]: DEBUG oslo_concurrency.lockutils [req-df78efb2-5665-4072-aa21-b0019e5d0001 req-d2573328-3272-4470-9057-de572693f7fb service nova] Lock "41d279f2-477b-44b2-9eb9-7b782c9c890f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.319269] env[68217]: DEBUG nova.compute.manager [req-df78efb2-5665-4072-aa21-b0019e5d0001 req-d2573328-3272-4470-9057-de572693f7fb service nova] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] No waiting events found dispatching network-vif-plugged-e5987781-918b-4d17-8151-7b4661f8b9d3 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 795.319516] env[68217]: WARNING nova.compute.manager [req-df78efb2-5665-4072-aa21-b0019e5d0001 req-d2573328-3272-4470-9057-de572693f7fb service nova] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Received unexpected event network-vif-plugged-e5987781-918b-4d17-8151-7b4661f8b9d3 for instance with vm_state building and task_state spawning. [ 795.389347] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961139, 'name': CreateVM_Task, 'duration_secs': 0.366401} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.389612] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 795.390418] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.390616] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.391016] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 795.391329] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-73c7e206-238d-45f4-b1fc-9a04e3cdd575 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.398178] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 795.398178] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d10de7-bc9c-ed40-096a-c60d87593b08" [ 795.398178] env[68217]: _type = "Task" [ 795.398178] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.410312] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d10de7-bc9c-ed40-096a-c60d87593b08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.466701] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.501s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 795.467305] env[68217]: DEBUG nova.compute.manager [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 795.470628] env[68217]: DEBUG oslo_concurrency.lockutils [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.702s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 795.472556] env[68217]: INFO nova.compute.claims [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 795.532928] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961140, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.583636] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961141, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.908949] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d10de7-bc9c-ed40-096a-c60d87593b08, 'name': SearchDatastore_Task, 'duration_secs': 0.05528} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 795.909233] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 795.909466] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 795.909695] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.909840] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.910025] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 795.910283] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9c7ccbbe-d565-4088-b0c8-578d43cc049d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.929125] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 795.929361] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 795.930501] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dae1edfd-be4f-4513-9d1a-d4e1abbc455c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 795.937584] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 795.937584] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]521299fd-429f-1b87-50d4-57d78f146ed1" [ 795.937584] env[68217]: _type = "Task" [ 795.937584] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 795.939967] env[68217]: DEBUG nova.compute.manager [req-78a634fe-3cb0-4173-b485-6e34e565b8a1 req-37d7f659-56f5-41f3-964f-f6ed0ae99380 service nova] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Received event network-changed-e5987781-918b-4d17-8151-7b4661f8b9d3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 795.940202] env[68217]: DEBUG nova.compute.manager [req-78a634fe-3cb0-4173-b485-6e34e565b8a1 req-37d7f659-56f5-41f3-964f-f6ed0ae99380 service nova] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Refreshing instance network info cache due to event network-changed-e5987781-918b-4d17-8151-7b4661f8b9d3. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 795.940419] env[68217]: DEBUG oslo_concurrency.lockutils [req-78a634fe-3cb0-4173-b485-6e34e565b8a1 req-37d7f659-56f5-41f3-964f-f6ed0ae99380 service nova] Acquiring lock "refresh_cache-41d279f2-477b-44b2-9eb9-7b782c9c890f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 795.940626] env[68217]: DEBUG oslo_concurrency.lockutils [req-78a634fe-3cb0-4173-b485-6e34e565b8a1 req-37d7f659-56f5-41f3-964f-f6ed0ae99380 service nova] Acquired lock "refresh_cache-41d279f2-477b-44b2-9eb9-7b782c9c890f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 795.940729] env[68217]: DEBUG nova.network.neutron [req-78a634fe-3cb0-4173-b485-6e34e565b8a1 req-37d7f659-56f5-41f3-964f-f6ed0ae99380 service nova] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Refreshing network info cache for port e5987781-918b-4d17-8151-7b4661f8b9d3 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 795.950725] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521299fd-429f-1b87-50d4-57d78f146ed1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 795.978878] env[68217]: DEBUG nova.compute.utils [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 795.983755] env[68217]: DEBUG nova.compute.manager [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 795.983843] env[68217]: DEBUG nova.network.neutron [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 796.033862] env[68217]: DEBUG oslo_vmware.api [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961140, 'name': PowerOnVM_Task, 'duration_secs': 0.6431} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.034462] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 796.034807] env[68217]: INFO nova.compute.manager [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Took 10.37 seconds to spawn the instance on the hypervisor. 
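The entries above and below all follow the same vCenter task pattern that recurs throughout this log: an asynchronous SOAP call (PowerOnVM_Task, CopyVirtualDisk_Task, Rename_Task, CreateVM_Task, ...) returns a Task managed object, and oslo.vmware then polls it, producing the "Waiting for the task", "progress is N%", and "completed successfully" lines. The sketch below shows that caller-side pattern in rough form; it is an illustration only, the vCenter host, credentials, and inventory path are placeholders rather than values from this environment, and the exact constructor arguments may differ between oslo.vmware releases.

```python
# Hedged sketch of the invoke-task / wait-for-task pattern visible in the
# surrounding log entries. Host, credentials and the inventory path below
# are placeholders, not values taken from this deployment.
from oslo_vmware import api

session = api.VMwareAPISession(
    'vc.example.test',               # vCenter host (placeholder)
    'administrator@vsphere.local',   # placeholder credentials
    'secret',
    api_retry_count=10,
    task_poll_interval=0.5,          # poll cadence behind the "progress is N%" lines
)

# Look up a VM by inventory path purely for illustration; Nova itself resolves
# the managed object reference via PropertyCollector queries keyed on the
# instance UUID.
vm_ref = session.invoke_api(session.vim, 'FindByInventoryPath',
                            session.vim.service_content.searchIndex,
                            inventoryPath='dc1/vm/example-instance')

# Start the asynchronous operation; vCenter returns a Task managed object.
task_ref = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

# Block until the task finishes. wait_for_task polls the TaskInfo and logs
# progress, which is what emits the "Task: {... PowerOnVM_Task} progress is N%"
# and "completed successfully" entries seen above.
session.wait_for_task(task_ref)
```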
[ 796.034998] env[68217]: DEBUG nova.compute.manager [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 796.036795] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb852337-88ad-4dea-9a1b-96addbc3fb15 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.059602] env[68217]: DEBUG nova.policy [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '752b6d9ab4d64b1390ca8388fb28db15', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad331ad8f44348f6b4c0a6c56977022d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 796.080812] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961141, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.387600] env[68217]: DEBUG nova.network.neutron [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Successfully updated port: e14e7381-56d1-46d7-ac1f-e49cea827394 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 796.452022] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521299fd-429f-1b87-50d4-57d78f146ed1, 'name': SearchDatastore_Task, 'duration_secs': 0.066941} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.453623] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-298cdc53-cbf9-4790-8756-53a0432ae6c7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.459974] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 796.459974] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52aaee51-601c-e3d9-cfac-d959142f0eaa" [ 796.459974] env[68217]: _type = "Task" [ 796.459974] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.469281] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52aaee51-601c-e3d9-cfac-d959142f0eaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.486991] env[68217]: DEBUG nova.compute.manager [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 796.540018] env[68217]: DEBUG nova.network.neutron [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Successfully created port: b029c255-6f3f-41b3-ba5c-16ca2a968c6e {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 796.557828] env[68217]: INFO nova.compute.manager [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Took 40.98 seconds to build instance. [ 796.586342] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961141, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.458146} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.588989] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] bd62c682-24f2-4559-887a-03186409f699/bd62c682-24f2-4559-887a-03186409f699.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 796.589246] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 796.589729] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e4afa836-a8c9-4cfe-9f82-0b241ac9db53 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.599358] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 796.599358] env[68217]: value = "task-2961142" [ 796.599358] env[68217]: _type = "Task" [ 796.599358] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 796.610974] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961142, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 796.891547] env[68217]: DEBUG oslo_concurrency.lockutils [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "refresh_cache-7a01c2c5-3108-4382-85c5-a5ea5e6e160c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 796.891681] env[68217]: DEBUG oslo_concurrency.lockutils [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "refresh_cache-7a01c2c5-3108-4382-85c5-a5ea5e6e160c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 796.891851] env[68217]: DEBUG nova.network.neutron [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 796.914952] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fe88e6e-46f1-46e4-958d-15d1c09a97f9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.919326] env[68217]: DEBUG nova.network.neutron [req-78a634fe-3cb0-4173-b485-6e34e565b8a1 req-37d7f659-56f5-41f3-964f-f6ed0ae99380 service nova] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Updated VIF entry in instance network info cache for port e5987781-918b-4d17-8151-7b4661f8b9d3. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 796.919704] env[68217]: DEBUG nova.network.neutron [req-78a634fe-3cb0-4173-b485-6e34e565b8a1 req-37d7f659-56f5-41f3-964f-f6ed0ae99380 service nova] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Updating instance_info_cache with network_info: [{"id": "e5987781-918b-4d17-8151-7b4661f8b9d3", "address": "fa:16:3e:2a:6b:74", "network": {"id": "9e8fc690-e99b-4d81-a587-00584139fd8d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1676314619-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebfeb38b81794c558c1164cecd7fa221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape5987781-91", "ovs_interfaceid": "e5987781-918b-4d17-8151-7b4661f8b9d3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 796.924105] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6001e7f-dca8-4bf9-9c87-96ee05576823 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.958360] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92db4abc-b469-4cff-95fe-e901d51f520e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.972458] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52aaee51-601c-e3d9-cfac-d959142f0eaa, 'name': SearchDatastore_Task, 'duration_secs': 0.060644} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 796.973442] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 796.973442] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 41d279f2-477b-44b2-9eb9-7b782c9c890f/41d279f2-477b-44b2-9eb9-7b782c9c890f.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 796.974226] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a60ee0-7d57-458f-8136-67789896afb7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.977803] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-552e592a-4131-4edc-91ed-2dad16ac9571 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 796.998067] env[68217]: DEBUG nova.compute.provider_tree [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 797.001447] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 797.001447] env[68217]: value = "task-2961143" [ 797.001447] env[68217]: _type = "Task" [ 797.001447] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.013271] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961143, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.059559] env[68217]: DEBUG oslo_concurrency.lockutils [None req-94da95ca-6bbb-41b0-9110-bd1ec5bf0e1c tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "e550084b-84dd-4ae8-8667-2edb45b49e2b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 70.101s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.109010] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961142, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.241223} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.109300] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 797.110064] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddd635ea-1f04-4861-98c5-711968923fe5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.138361] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] bd62c682-24f2-4559-887a-03186409f699/bd62c682-24f2-4559-887a-03186409f699.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 797.139205] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-885c7fba-4225-4c06-832c-c80b07950d45 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.162391] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 797.162391] env[68217]: value = "task-2961144" [ 797.162391] env[68217]: _type = "Task" [ 797.162391] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.173986] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961144, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.424663] env[68217]: DEBUG oslo_concurrency.lockutils [req-78a634fe-3cb0-4173-b485-6e34e565b8a1 req-37d7f659-56f5-41f3-964f-f6ed0ae99380 service nova] Releasing lock "refresh_cache-41d279f2-477b-44b2-9eb9-7b782c9c890f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 797.457116] env[68217]: DEBUG nova.network.neutron [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 797.499299] env[68217]: DEBUG nova.compute.manager [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 797.502931] env[68217]: DEBUG nova.scheduler.client.report [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 797.517024] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961143, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.477995} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 797.517479] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 41d279f2-477b-44b2-9eb9-7b782c9c890f/41d279f2-477b-44b2-9eb9-7b782c9c890f.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 797.517600] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 797.517799] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ffc9f552-540c-4174-ae8d-01ddee0a2202 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.529717] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 797.529717] env[68217]: value = "task-2961145" [ 797.529717] env[68217]: _type = "Task" [ 797.529717] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 797.540208] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961145, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.542894] env[68217]: DEBUG nova.virt.hardware [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 797.543145] env[68217]: DEBUG nova.virt.hardware [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 797.543321] env[68217]: DEBUG nova.virt.hardware [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 797.543524] env[68217]: DEBUG nova.virt.hardware [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 797.543698] env[68217]: DEBUG nova.virt.hardware [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 797.543845] env[68217]: DEBUG nova.virt.hardware [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 797.544074] env[68217]: DEBUG nova.virt.hardware [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 797.544261] env[68217]: DEBUG nova.virt.hardware [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 797.544441] env[68217]: 
DEBUG nova.virt.hardware [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 797.544604] env[68217]: DEBUG nova.virt.hardware [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 797.544789] env[68217]: DEBUG nova.virt.hardware [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 797.545986] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70938db7-0a97-467c-ba4c-a5d0834742d4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.556869] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bc8a49c-f960-495a-be44-6e9aa9c40920 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 797.565141] env[68217]: DEBUG nova.compute.manager [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 797.637880] env[68217]: DEBUG nova.network.neutron [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Updating instance_info_cache with network_info: [{"id": "e14e7381-56d1-46d7-ac1f-e49cea827394", "address": "fa:16:3e:40:05:71", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape14e7381-56", "ovs_interfaceid": "e14e7381-56d1-46d7-ac1f-e49cea827394", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 797.673709] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961144, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 797.727214] env[68217]: INFO nova.compute.manager [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Rescuing [ 797.727472] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "refresh_cache-e550084b-84dd-4ae8-8667-2edb45b49e2b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 797.727620] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquired lock "refresh_cache-e550084b-84dd-4ae8-8667-2edb45b49e2b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 797.727787] env[68217]: DEBUG nova.network.neutron [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 797.785575] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 797.785898] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 797.971558] env[68217]: DEBUG nova.compute.manager [req-d6f4da99-32ed-422a-8a71-032a1197a916 req-53f010c7-2ea1-4c7a-a68c-d4ec4825a8f6 service nova] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Received event network-vif-plugged-e14e7381-56d1-46d7-ac1f-e49cea827394 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 797.971843] env[68217]: DEBUG oslo_concurrency.lockutils [req-d6f4da99-32ed-422a-8a71-032a1197a916 req-53f010c7-2ea1-4c7a-a68c-d4ec4825a8f6 service nova] Acquiring lock "7a01c2c5-3108-4382-85c5-a5ea5e6e160c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 797.972080] env[68217]: DEBUG oslo_concurrency.lockutils [req-d6f4da99-32ed-422a-8a71-032a1197a916 req-53f010c7-2ea1-4c7a-a68c-d4ec4825a8f6 service nova] Lock "7a01c2c5-3108-4382-85c5-a5ea5e6e160c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 797.972274] env[68217]: DEBUG oslo_concurrency.lockutils [req-d6f4da99-32ed-422a-8a71-032a1197a916 req-53f010c7-2ea1-4c7a-a68c-d4ec4825a8f6 service nova] Lock "7a01c2c5-3108-4382-85c5-a5ea5e6e160c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s 
{{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 797.972450] env[68217]: DEBUG nova.compute.manager [req-d6f4da99-32ed-422a-8a71-032a1197a916 req-53f010c7-2ea1-4c7a-a68c-d4ec4825a8f6 service nova] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] No waiting events found dispatching network-vif-plugged-e14e7381-56d1-46d7-ac1f-e49cea827394 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 797.972608] env[68217]: WARNING nova.compute.manager [req-d6f4da99-32ed-422a-8a71-032a1197a916 req-53f010c7-2ea1-4c7a-a68c-d4ec4825a8f6 service nova] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Received unexpected event network-vif-plugged-e14e7381-56d1-46d7-ac1f-e49cea827394 for instance with vm_state building and task_state spawning. [ 797.972754] env[68217]: DEBUG nova.compute.manager [req-d6f4da99-32ed-422a-8a71-032a1197a916 req-53f010c7-2ea1-4c7a-a68c-d4ec4825a8f6 service nova] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Received event network-changed-e14e7381-56d1-46d7-ac1f-e49cea827394 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 797.972933] env[68217]: DEBUG nova.compute.manager [req-d6f4da99-32ed-422a-8a71-032a1197a916 req-53f010c7-2ea1-4c7a-a68c-d4ec4825a8f6 service nova] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Refreshing instance network info cache due to event network-changed-e14e7381-56d1-46d7-ac1f-e49cea827394. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 797.973162] env[68217]: DEBUG oslo_concurrency.lockutils [req-d6f4da99-32ed-422a-8a71-032a1197a916 req-53f010c7-2ea1-4c7a-a68c-d4ec4825a8f6 service nova] Acquiring lock "refresh_cache-7a01c2c5-3108-4382-85c5-a5ea5e6e160c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.012207] env[68217]: DEBUG oslo_concurrency.lockutils [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.542s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 798.012729] env[68217]: DEBUG nova.compute.manager [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 798.015873] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.414s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 798.016116] env[68217]: DEBUG nova.objects.instance [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lazy-loading 'resources' on Instance uuid 7ec30097-1151-4b0d-8226-e4d34ea7b3c9 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 798.040103] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961145, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069342} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.040364] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 798.041143] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80505c28-a30d-4d19-8603-e401c691f209 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.063935] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Reconfiguring VM instance instance-00000035 to attach disk [datastore2] 41d279f2-477b-44b2-9eb9-7b782c9c890f/41d279f2-477b-44b2-9eb9-7b782c9c890f.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 798.064836] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff8d570f-758e-47a0-820e-f9a3d359a29f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.088999] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 798.088999] env[68217]: value = "task-2961146" [ 798.088999] env[68217]: _type = "Task" [ 798.088999] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.098385] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961146, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.099365] env[68217]: DEBUG oslo_concurrency.lockutils [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 798.140913] env[68217]: DEBUG oslo_concurrency.lockutils [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "refresh_cache-7a01c2c5-3108-4382-85c5-a5ea5e6e160c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 798.141271] env[68217]: DEBUG nova.compute.manager [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Instance network_info: |[{"id": "e14e7381-56d1-46d7-ac1f-e49cea827394", "address": "fa:16:3e:40:05:71", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape14e7381-56", "ovs_interfaceid": "e14e7381-56d1-46d7-ac1f-e49cea827394", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 798.141568] env[68217]: DEBUG oslo_concurrency.lockutils [req-d6f4da99-32ed-422a-8a71-032a1197a916 req-53f010c7-2ea1-4c7a-a68c-d4ec4825a8f6 service nova] Acquired lock "refresh_cache-7a01c2c5-3108-4382-85c5-a5ea5e6e160c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.141792] env[68217]: DEBUG nova.network.neutron [req-d6f4da99-32ed-422a-8a71-032a1197a916 req-53f010c7-2ea1-4c7a-a68c-d4ec4825a8f6 service nova] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Refreshing network info cache for port e14e7381-56d1-46d7-ac1f-e49cea827394 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 798.144700] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:05:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'02bbcead-d833-4543-bec6-fb82dfe659ff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e14e7381-56d1-46d7-ac1f-e49cea827394', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 798.150431] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 798.151154] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 798.151388] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf0b8f61-40ef-4248-b37c-5d4f58c86ab0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.175447] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961144, 'name': ReconfigVM_Task, 'duration_secs': 0.809026} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.176727] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Reconfigured VM instance instance-00000034 to attach disk [datastore2] bd62c682-24f2-4559-887a-03186409f699/bd62c682-24f2-4559-887a-03186409f699.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 798.177568] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 798.177568] env[68217]: value = "task-2961147" [ 798.177568] env[68217]: _type = "Task" [ 798.177568] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.177757] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-475e760d-67fc-48a1-8e60-6e8c9d8aa23f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.187284] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961147, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.188643] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 798.188643] env[68217]: value = "task-2961148" [ 798.188643] env[68217]: _type = "Task" [ 798.188643] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.196856] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961148, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.296628] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.298362] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.298362] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.298362] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.298362] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.298362] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.298362] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68217) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 798.299022] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 798.518867] env[68217]: DEBUG nova.compute.utils [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 798.520385] env[68217]: DEBUG nova.compute.manager [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 798.520564] env[68217]: DEBUG nova.network.neutron [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 798.600717] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961146, 'name': ReconfigVM_Task, 'duration_secs': 0.380407} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.603517] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Reconfigured VM instance instance-00000035 to attach disk [datastore2] 41d279f2-477b-44b2-9eb9-7b782c9c890f/41d279f2-477b-44b2-9eb9-7b782c9c890f.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 798.605066] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4c93f88c-17eb-49fd-8d86-e85efa6439ee {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.611560] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 798.611560] env[68217]: value = "task-2961149" [ 798.611560] env[68217]: _type = "Task" [ 798.611560] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.622391] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961149, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.689275] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961147, 'name': CreateVM_Task, 'duration_secs': 0.373985} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.692352] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 798.693103] env[68217]: DEBUG oslo_concurrency.lockutils [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.693514] env[68217]: DEBUG oslo_concurrency.lockutils [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.694930] env[68217]: DEBUG oslo_concurrency.lockutils [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 798.694930] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c9e78c4b-960a-41a4-bc53-e0f3fd018b2e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.703421] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 798.703421] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52897d11-94b9-7693-c6c4-e11e112f09bf" [ 798.703421] env[68217]: _type = "Task" [ 798.703421] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.704190] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961148, 'name': Rename_Task, 'duration_secs': 0.196303} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.704190] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 798.707314] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c130d0b1-3c31-45e8-9c2a-7fecb8d7f057 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.719069] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52897d11-94b9-7693-c6c4-e11e112f09bf, 'name': SearchDatastore_Task, 'duration_secs': 0.013795} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 798.720536] env[68217]: DEBUG oslo_concurrency.lockutils [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 798.721201] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 798.721201] env[68217]: DEBUG oslo_concurrency.lockutils [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 798.721521] env[68217]: DEBUG oslo_concurrency.lockutils [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 798.721521] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 798.721923] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 798.721923] env[68217]: value = "task-2961150" [ 798.721923] env[68217]: _type = "Task" [ 798.721923] 
env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.724504] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-185393cf-c5f3-4135-9076-3a02e65e78f5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.734693] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961150, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.737045] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 798.737257] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 798.740457] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9d19be2e-b459-4a08-b73d-710f4bebd44d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.746212] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 798.746212] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5287ab58-42c9-ade7-bb35-c1636921e747" [ 798.746212] env[68217]: _type = "Task" [ 798.746212] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 798.754900] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5287ab58-42c9-ade7-bb35-c1636921e747, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 798.802406] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 798.894387] env[68217]: DEBUG nova.network.neutron [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Successfully updated port: b029c255-6f3f-41b3-ba5c-16ca2a968c6e {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 798.939203] env[68217]: DEBUG nova.policy [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8299c9d04a174d4d9ec6402e059b017b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0722d75c14c443c582966ee909ccfc2c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 798.966242] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d715c361-d091-4bb0-a0ca-160ddc61fc34 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.977141] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ba2c2f8-b4fc-40d1-b9ad-0c3767d1fcf5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.012560] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74dbe58-e854-4e10-b1fe-3e8590a6488d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.017422] env[68217]: DEBUG nova.network.neutron [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Updating instance_info_cache with network_info: [{"id": "0abd5109-c94f-4eba-b6b2-ca8b28794157", "address": "fa:16:3e:7f:0a:36", "network": {"id": "9b382d3b-5356-4cee-b6ae-e9a825915fe9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-894310318-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bde4bb32b82948dd991d1fb8890c991b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4162774e-ec80-4d85-aeb4-fae77f197393", "external-id": "nsx-vlan-transportzone-542", "segmentation_id": 542, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap0abd5109-c9", "ovs_interfaceid": "0abd5109-c94f-4eba-b6b2-ca8b28794157", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.024592] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3910d2d7-68fe-45da-8457-56da31009d08 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.032149] env[68217]: DEBUG nova.compute.manager [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 799.050784] env[68217]: DEBUG nova.compute.provider_tree [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 799.122766] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961149, 'name': Rename_Task, 'duration_secs': 0.191057} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.123107] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 799.123370] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c279c562-8aff-4af4-84ac-9dac46a7062c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.130323] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 799.130323] env[68217]: value = "task-2961151" [ 799.130323] env[68217]: _type = "Task" [ 799.130323] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.141195] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961151, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.237521] env[68217]: DEBUG oslo_vmware.api [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961150, 'name': PowerOnVM_Task, 'duration_secs': 0.456283} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.237821] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 799.238347] env[68217]: INFO nova.compute.manager [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Took 11.49 seconds to spawn the instance on the hypervisor. [ 799.238553] env[68217]: DEBUG nova.compute.manager [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 799.239466] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cd5c2ac-9001-43c3-98e0-f1cd5cd2df51 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.258245] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5287ab58-42c9-ade7-bb35-c1636921e747, 'name': SearchDatastore_Task, 'duration_secs': 0.010809} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.259052] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-05c1c9f8-966c-427a-8fc7-54350ed94710 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.266401] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 799.266401] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]521d424f-ac7c-660f-ee83-511669a0f223" [ 799.266401] env[68217]: _type = "Task" [ 799.266401] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.274391] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521d424f-ac7c-660f-ee83-511669a0f223, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.397173] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "refresh_cache-ba39e563-3e3a-40aa-815f-760f0f37a55d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 799.397881] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "refresh_cache-ba39e563-3e3a-40aa-815f-760f0f37a55d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 799.397881] env[68217]: DEBUG nova.network.neutron [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 799.406767] env[68217]: DEBUG nova.network.neutron [req-d6f4da99-32ed-422a-8a71-032a1197a916 req-53f010c7-2ea1-4c7a-a68c-d4ec4825a8f6 service nova] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Updated VIF entry in instance network info cache for port e14e7381-56d1-46d7-ac1f-e49cea827394. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 799.407137] env[68217]: DEBUG nova.network.neutron [req-d6f4da99-32ed-422a-8a71-032a1197a916 req-53f010c7-2ea1-4c7a-a68c-d4ec4825a8f6 service nova] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Updating instance_info_cache with network_info: [{"id": "e14e7381-56d1-46d7-ac1f-e49cea827394", "address": "fa:16:3e:40:05:71", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape14e7381-56", "ovs_interfaceid": "e14e7381-56d1-46d7-ac1f-e49cea827394", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 799.520805] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Releasing lock "refresh_cache-e550084b-84dd-4ae8-8667-2edb45b49e2b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.559029] env[68217]: DEBUG 
nova.scheduler.client.report [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 799.643474] env[68217]: DEBUG oslo_vmware.api [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961151, 'name': PowerOnVM_Task, 'duration_secs': 0.453605} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.643900] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 799.644064] env[68217]: INFO nova.compute.manager [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Took 7.19 seconds to spawn the instance on the hypervisor. [ 799.644294] env[68217]: DEBUG nova.compute.manager [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 799.645091] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976d23e7-8983-44b0-987a-0d82563a354e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.655764] env[68217]: DEBUG nova.network.neutron [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Successfully created port: 1301548f-a001-481f-8e30-4f1a5721d9a9 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 799.760953] env[68217]: INFO nova.compute.manager [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Took 40.02 seconds to build instance. [ 799.780622] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521d424f-ac7c-660f-ee83-511669a0f223, 'name': SearchDatastore_Task, 'duration_secs': 0.020724} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 799.780665] env[68217]: DEBUG oslo_concurrency.lockutils [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.781474] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 7a01c2c5-3108-4382-85c5-a5ea5e6e160c/7a01c2c5-3108-4382-85c5-a5ea5e6e160c.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 799.781474] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b780a566-7b41-4a92-b8de-e2a7d6e9f64d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 799.788982] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 799.788982] env[68217]: value = "task-2961152" [ 799.788982] env[68217]: _type = "Task" [ 799.788982] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 799.799355] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961152, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 799.912039] env[68217]: DEBUG oslo_concurrency.lockutils [req-d6f4da99-32ed-422a-8a71-032a1197a916 req-53f010c7-2ea1-4c7a-a68c-d4ec4825a8f6 service nova] Releasing lock "refresh_cache-7a01c2c5-3108-4382-85c5-a5ea5e6e160c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 799.940124] env[68217]: DEBUG nova.network.neutron [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 800.005338] env[68217]: DEBUG nova.compute.manager [req-fc42970d-8f97-492d-802e-2e5ca6c44171 req-ff0a57a2-4577-4a12-a538-61397945eb29 service nova] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Received event network-vif-plugged-b029c255-6f3f-41b3-ba5c-16ca2a968c6e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 800.005469] env[68217]: DEBUG oslo_concurrency.lockutils [req-fc42970d-8f97-492d-802e-2e5ca6c44171 req-ff0a57a2-4577-4a12-a538-61397945eb29 service nova] Acquiring lock "ba39e563-3e3a-40aa-815f-760f0f37a55d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 800.005657] env[68217]: DEBUG oslo_concurrency.lockutils [req-fc42970d-8f97-492d-802e-2e5ca6c44171 req-ff0a57a2-4577-4a12-a538-61397945eb29 service nova] Lock "ba39e563-3e3a-40aa-815f-760f0f37a55d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.005827] env[68217]: DEBUG oslo_concurrency.lockutils [req-fc42970d-8f97-492d-802e-2e5ca6c44171 req-ff0a57a2-4577-4a12-a538-61397945eb29 service nova] Lock "ba39e563-3e3a-40aa-815f-760f0f37a55d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.005989] env[68217]: DEBUG nova.compute.manager [req-fc42970d-8f97-492d-802e-2e5ca6c44171 req-ff0a57a2-4577-4a12-a538-61397945eb29 service nova] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] No waiting events found dispatching network-vif-plugged-b029c255-6f3f-41b3-ba5c-16ca2a968c6e {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 800.006326] env[68217]: WARNING nova.compute.manager [req-fc42970d-8f97-492d-802e-2e5ca6c44171 req-ff0a57a2-4577-4a12-a538-61397945eb29 service nova] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Received unexpected event network-vif-plugged-b029c255-6f3f-41b3-ba5c-16ca2a968c6e for instance with vm_state building and task_state spawning. [ 800.006559] env[68217]: DEBUG nova.compute.manager [req-fc42970d-8f97-492d-802e-2e5ca6c44171 req-ff0a57a2-4577-4a12-a538-61397945eb29 service nova] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Received event network-changed-b029c255-6f3f-41b3-ba5c-16ca2a968c6e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 800.006725] env[68217]: DEBUG nova.compute.manager [req-fc42970d-8f97-492d-802e-2e5ca6c44171 req-ff0a57a2-4577-4a12-a538-61397945eb29 service nova] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Refreshing instance network info cache due to event network-changed-b029c255-6f3f-41b3-ba5c-16ca2a968c6e. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 800.006888] env[68217]: DEBUG oslo_concurrency.lockutils [req-fc42970d-8f97-492d-802e-2e5ca6c44171 req-ff0a57a2-4577-4a12-a538-61397945eb29 service nova] Acquiring lock "refresh_cache-ba39e563-3e3a-40aa-815f-760f0f37a55d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 800.043121] env[68217]: DEBUG nova.compute.manager [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 800.066947] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.051s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.070950] env[68217]: DEBUG nova.virt.hardware [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 800.071191] env[68217]: DEBUG nova.virt.hardware [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 800.071344] env[68217]: DEBUG nova.virt.hardware [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 800.071517] env[68217]: DEBUG nova.virt.hardware [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 800.071659] env[68217]: DEBUG nova.virt.hardware [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 800.071826] 
env[68217]: DEBUG nova.virt.hardware [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 800.072086] env[68217]: DEBUG nova.virt.hardware [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 800.072255] env[68217]: DEBUG nova.virt.hardware [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 800.072425] env[68217]: DEBUG nova.virt.hardware [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 800.072615] env[68217]: DEBUG nova.virt.hardware [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 800.072814] env[68217]: DEBUG nova.virt.hardware [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 800.074008] env[68217]: DEBUG oslo_concurrency.lockutils [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.459s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 800.077472] env[68217]: INFO nova.compute.claims [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 800.078111] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d43fe4a-8655-4c96-bff3-3d3791a70741 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.093588] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0daa7e2-f8d6-4d29-84cb-adc137ab0147 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.102645] env[68217]: INFO nova.scheduler.client.report [None req-e593648e-ddb2-4084-8055-feab664eb862 
tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Deleted allocations for instance 7ec30097-1151-4b0d-8226-e4d34ea7b3c9 [ 800.152410] env[68217]: DEBUG nova.network.neutron [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Updating instance_info_cache with network_info: [{"id": "b029c255-6f3f-41b3-ba5c-16ca2a968c6e", "address": "fa:16:3e:7d:95:c1", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb029c255-6f", "ovs_interfaceid": "b029c255-6f3f-41b3-ba5c-16ca2a968c6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 800.165745] env[68217]: INFO nova.compute.manager [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Took 36.08 seconds to build instance. [ 800.263500] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7116b50d-6e77-42fd-a744-3f8ec87716e4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "bd62c682-24f2-4559-887a-03186409f699" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.239s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.300038] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961152, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.607499] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e593648e-ddb2-4084-8055-feab664eb862 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "7ec30097-1151-4b0d-8226-e4d34ea7b3c9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.272s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.658027] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "refresh_cache-ba39e563-3e3a-40aa-815f-760f0f37a55d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 800.658027] env[68217]: DEBUG nova.compute.manager [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Instance network_info: |[{"id": "b029c255-6f3f-41b3-ba5c-16ca2a968c6e", "address": "fa:16:3e:7d:95:c1", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb029c255-6f", "ovs_interfaceid": "b029c255-6f3f-41b3-ba5c-16ca2a968c6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 800.658603] env[68217]: DEBUG oslo_concurrency.lockutils [req-fc42970d-8f97-492d-802e-2e5ca6c44171 req-ff0a57a2-4577-4a12-a538-61397945eb29 service nova] Acquired lock "refresh_cache-ba39e563-3e3a-40aa-815f-760f0f37a55d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 800.658603] env[68217]: DEBUG nova.network.neutron [req-fc42970d-8f97-492d-802e-2e5ca6c44171 req-ff0a57a2-4577-4a12-a538-61397945eb29 service nova] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Refreshing network info cache for port b029c255-6f3f-41b3-ba5c-16ca2a968c6e {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 800.662070] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:95:c1', 'network_ref': {'type': 
'OpaqueNetwork', 'network-id': 'b1f3e6c3-5584-4852-9017-476ab8ac4946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b029c255-6f3f-41b3-ba5c-16ca2a968c6e', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 800.674545] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 800.674545] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f4aa591-ca11-46bd-be05-b79adaded3dc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "41d279f2-477b-44b2-9eb9-7b782c9c890f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 61.873s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 800.674545] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 800.675244] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-18f33bf3-39e7-4d0c-99c9-f981a71fe1cc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.697980] env[68217]: DEBUG nova.compute.manager [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 800.709187] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 800.709187] env[68217]: value = "task-2961153" [ 800.709187] env[68217]: _type = "Task" [ 800.709187] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.721060] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961153, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 800.766356] env[68217]: DEBUG nova.compute.manager [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 800.805205] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961152, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.998183} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 800.805205] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 7a01c2c5-3108-4382-85c5-a5ea5e6e160c/7a01c2c5-3108-4382-85c5-a5ea5e6e160c.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 800.805205] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 800.805205] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-269d1d1e-0608-4c06-a392-9ed7cdf54e16 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.813948] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 800.813948] env[68217]: value = "task-2961154" [ 800.813948] env[68217]: _type = "Task" [ 800.813948] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 800.825534] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961154, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.091882] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 801.094226] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d45d3f20-4206-4b91-b3cb-be1fa40888b6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.102490] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 801.102490] env[68217]: value = "task-2961155" [ 801.102490] env[68217]: _type = "Task" [ 801.102490] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.116744] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961155, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.226431] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.234090] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961153, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.286730] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.330618] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961154, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.111337} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.330896] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 801.333628] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df827e7d-c360-4e42-84d0-d9063cb7d27a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.362776] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Reconfiguring VM instance instance-00000036 to attach disk [datastore2] 7a01c2c5-3108-4382-85c5-a5ea5e6e160c/7a01c2c5-3108-4382-85c5-a5ea5e6e160c.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 801.368235] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a968dc8-eb83-4564-8204-1bfa59481b1d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.393882] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 801.393882] env[68217]: value = "task-2961156" [ 801.393882] env[68217]: _type = "Task" [ 801.393882] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.403388] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961156, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.573989] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8cd253d-5a8a-4506-a80a-44faecfb7d5d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.589670] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9a258b9-1ac6-4763-8efb-d971e4831c54 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.632446] env[68217]: DEBUG nova.network.neutron [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Successfully updated port: 1301548f-a001-481f-8e30-4f1a5721d9a9 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 801.640178] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c30207d-4d10-4128-884f-408d6382dc64 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.650404] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961155, 'name': PowerOffVM_Task, 'duration_secs': 0.355135} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.653502] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 801.658168] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d207d5b-85b6-4638-ab5a-d434b816f1ce {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.661652] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c962d4-0c5f-4e96-8a91-17af88bd6732 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.678019] env[68217]: DEBUG nova.compute.provider_tree [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 801.697608] env[68217]: DEBUG nova.scheduler.client.report [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 801.700562] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-347fbc98-d7ea-4db5-8ed9-9e4b0cbca7cc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.713957] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquiring lock "dfeeed37-8c84-4ecc-87ea-f4239f512fb1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.714274] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "dfeeed37-8c84-4ecc-87ea-f4239f512fb1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.714496] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 
tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquiring lock "dfeeed37-8c84-4ecc-87ea-f4239f512fb1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 801.714793] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "dfeeed37-8c84-4ecc-87ea-f4239f512fb1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 801.716178] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "dfeeed37-8c84-4ecc-87ea-f4239f512fb1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 801.718283] env[68217]: INFO nova.compute.manager [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Terminating instance [ 801.720730] env[68217]: DEBUG nova.network.neutron [req-fc42970d-8f97-492d-802e-2e5ca6c44171 req-ff0a57a2-4577-4a12-a538-61397945eb29 service nova] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Updated VIF entry in instance network info cache for port b029c255-6f3f-41b3-ba5c-16ca2a968c6e. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 801.721046] env[68217]: DEBUG nova.network.neutron [req-fc42970d-8f97-492d-802e-2e5ca6c44171 req-ff0a57a2-4577-4a12-a538-61397945eb29 service nova] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Updating instance_info_cache with network_info: [{"id": "b029c255-6f3f-41b3-ba5c-16ca2a968c6e", "address": "fa:16:3e:7d:95:c1", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb029c255-6f", "ovs_interfaceid": "b029c255-6f3f-41b3-ba5c-16ca2a968c6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 801.732917] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961153, 'name': CreateVM_Task, 'duration_secs': 0.550473} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 801.733084] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 801.733875] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.733934] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.734244] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 801.737245] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77b56366-e5e8-452e-bbec-8c7939553dc3 {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.750039] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 801.750039] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52617457-e371-e156-7fca-f8c5dc59da50" [ 801.750039] env[68217]: _type = "Task" [ 801.750039] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.755348] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 801.756353] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c21b6495-9c8d-4850-a012-cab5025b6a76 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.761422] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52617457-e371-e156-7fca-f8c5dc59da50, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.769436] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 801.769436] env[68217]: value = "task-2961157" [ 801.769436] env[68217]: _type = "Task" [ 801.769436] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.780882] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] VM already powered off {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 801.781170] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 801.781510] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 801.781733] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 801.782032] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 801.782321] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c5b7fa8f-2d9e-499c-b8ea-1fcd495d958a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.794037] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 801.794037] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 801.794189] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4565ade2-7562-4df6-93b2-e35c20276918 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 801.806174] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 801.806174] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52838519-b222-7eca-acc1-9efaa94cb01e" [ 801.806174] env[68217]: _type = "Task" [ 801.806174] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 801.818441] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52838519-b222-7eca-acc1-9efaa94cb01e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 801.905250] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961156, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.031787] env[68217]: DEBUG nova.compute.manager [req-00e70a1e-62ce-48bf-982a-4555c148a71a req-0abf0ed2-6e79-401b-8580-9e06dc90313d service nova] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Received event network-vif-plugged-1301548f-a001-481f-8e30-4f1a5721d9a9 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 802.031999] env[68217]: DEBUG oslo_concurrency.lockutils [req-00e70a1e-62ce-48bf-982a-4555c148a71a req-0abf0ed2-6e79-401b-8580-9e06dc90313d service nova] Acquiring lock "e642c93b-ca48-4d23-9abb-ff243855d8d0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.032238] env[68217]: DEBUG oslo_concurrency.lockutils [req-00e70a1e-62ce-48bf-982a-4555c148a71a req-0abf0ed2-6e79-401b-8580-9e06dc90313d service nova] Lock "e642c93b-ca48-4d23-9abb-ff243855d8d0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.032420] env[68217]: DEBUG oslo_concurrency.lockutils [req-00e70a1e-62ce-48bf-982a-4555c148a71a req-0abf0ed2-6e79-401b-8580-9e06dc90313d service nova] Lock "e642c93b-ca48-4d23-9abb-ff243855d8d0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.032589] env[68217]: DEBUG nova.compute.manager [req-00e70a1e-62ce-48bf-982a-4555c148a71a req-0abf0ed2-6e79-401b-8580-9e06dc90313d service nova] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] No waiting events found dispatching 
network-vif-plugged-1301548f-a001-481f-8e30-4f1a5721d9a9 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 802.032854] env[68217]: WARNING nova.compute.manager [req-00e70a1e-62ce-48bf-982a-4555c148a71a req-0abf0ed2-6e79-401b-8580-9e06dc90313d service nova] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Received unexpected event network-vif-plugged-1301548f-a001-481f-8e30-4f1a5721d9a9 for instance with vm_state building and task_state spawning. [ 802.033064] env[68217]: DEBUG nova.compute.manager [req-00e70a1e-62ce-48bf-982a-4555c148a71a req-0abf0ed2-6e79-401b-8580-9e06dc90313d service nova] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Received event network-changed-1301548f-a001-481f-8e30-4f1a5721d9a9 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 802.033239] env[68217]: DEBUG nova.compute.manager [req-00e70a1e-62ce-48bf-982a-4555c148a71a req-0abf0ed2-6e79-401b-8580-9e06dc90313d service nova] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Refreshing instance network info cache due to event network-changed-1301548f-a001-481f-8e30-4f1a5721d9a9. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 802.033430] env[68217]: DEBUG oslo_concurrency.lockutils [req-00e70a1e-62ce-48bf-982a-4555c148a71a req-0abf0ed2-6e79-401b-8580-9e06dc90313d service nova] Acquiring lock "refresh_cache-e642c93b-ca48-4d23-9abb-ff243855d8d0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.033585] env[68217]: DEBUG oslo_concurrency.lockutils [req-00e70a1e-62ce-48bf-982a-4555c148a71a req-0abf0ed2-6e79-401b-8580-9e06dc90313d service nova] Acquired lock "refresh_cache-e642c93b-ca48-4d23-9abb-ff243855d8d0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.033750] env[68217]: DEBUG nova.network.neutron [req-00e70a1e-62ce-48bf-982a-4555c148a71a req-0abf0ed2-6e79-401b-8580-9e06dc90313d service nova] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Refreshing network info cache for port 1301548f-a001-481f-8e30-4f1a5721d9a9 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 802.055081] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee1a2bfe-329f-485e-a44e-7a086dcd0701 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.063391] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-78ff8de6-92bb-4024-a485-58fe2987085d tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Suspending the VM {{(pid=68217) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 802.063660] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-26b244f4-bdbe-4970-a5cc-d934f658e7e6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.073530] env[68217]: DEBUG oslo_vmware.api [None req-78ff8de6-92bb-4024-a485-58fe2987085d tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 802.073530] env[68217]: value = "task-2961158" [ 802.073530] env[68217]: _type = "Task" [ 802.073530] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.086218] env[68217]: DEBUG oslo_vmware.api [None req-78ff8de6-92bb-4024-a485-58fe2987085d tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961158, 'name': SuspendVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.145304] env[68217]: DEBUG oslo_concurrency.lockutils [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Acquiring lock "refresh_cache-e642c93b-ca48-4d23-9abb-ff243855d8d0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.205698] env[68217]: DEBUG oslo_concurrency.lockutils [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.132s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 802.206236] env[68217]: DEBUG nova.compute.manager [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 802.209415] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.485s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 802.209653] env[68217]: DEBUG nova.objects.instance [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lazy-loading 'resources' on Instance uuid 156ea1ad-6e52-4848-915d-7ba74c606e6e {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 802.227247] env[68217]: DEBUG oslo_concurrency.lockutils [req-fc42970d-8f97-492d-802e-2e5ca6c44171 req-ff0a57a2-4577-4a12-a538-61397945eb29 service nova] Releasing lock "refresh_cache-ba39e563-3e3a-40aa-815f-760f0f37a55d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.228044] env[68217]: DEBUG nova.compute.manager [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 802.228283] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 802.229516] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9326471-d065-4a74-90b0-6393ead1462c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.239498] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 802.239835] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fd9e3fb7-51af-4a3c-8468-dc2b13399c52 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.254264] env[68217]: DEBUG oslo_vmware.api [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 802.254264] env[68217]: value = "task-2961159" [ 802.254264] env[68217]: _type = "Task" [ 802.254264] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.254852] env[68217]: DEBUG nova.compute.manager [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Stashing vm_state: active {{(pid=68217) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 802.269608] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52617457-e371-e156-7fca-f8c5dc59da50, 'name': SearchDatastore_Task, 'duration_secs': 0.01232} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.273251] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.273497] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 802.273758] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 802.273954] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 802.274309] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 802.274964] env[68217]: DEBUG oslo_vmware.api [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961159, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.275243] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff6fd76a-0f13-49e3-bdd7-8609cdf23649 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.286080] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 802.286303] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 802.287106] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1021b3d8-46e1-42d1-afbc-9e358365a4da {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.294869] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 802.294869] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c5ddca-95dc-0fea-96e4-4a941141a512" [ 802.294869] env[68217]: _type = "Task" [ 802.294869] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.305272] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c5ddca-95dc-0fea-96e4-4a941141a512, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.316957] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52838519-b222-7eca-acc1-9efaa94cb01e, 'name': SearchDatastore_Task, 'duration_secs': 0.012884} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.317827] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54fe8049-642c-4b2b-a5c1-109c482f1370 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.325947] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 802.325947] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5289e2ba-ae5c-12b1-43d9-3ed156851b38" [ 802.325947] env[68217]: _type = "Task" [ 802.325947] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.337299] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5289e2ba-ae5c-12b1-43d9-3ed156851b38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.406223] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961156, 'name': ReconfigVM_Task, 'duration_secs': 0.749077} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.406557] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Reconfigured VM instance instance-00000036 to attach disk [datastore2] 7a01c2c5-3108-4382-85c5-a5ea5e6e160c/7a01c2c5-3108-4382-85c5-a5ea5e6e160c.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 802.407294] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-607915d2-195d-4375-a1f4-8a67045516e1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.426434] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 802.426434] env[68217]: value = "task-2961160" [ 802.426434] env[68217]: _type = "Task" [ 802.426434] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.437524] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961160, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.585383] env[68217]: DEBUG oslo_vmware.api [None req-78ff8de6-92bb-4024-a485-58fe2987085d tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961158, 'name': SuspendVM_Task} progress is 62%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.593028] env[68217]: DEBUG nova.network.neutron [req-00e70a1e-62ce-48bf-982a-4555c148a71a req-0abf0ed2-6e79-401b-8580-9e06dc90313d service nova] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 802.685377] env[68217]: DEBUG nova.network.neutron [req-00e70a1e-62ce-48bf-982a-4555c148a71a req-0abf0ed2-6e79-401b-8580-9e06dc90313d service nova] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 802.713348] env[68217]: DEBUG nova.compute.utils [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 802.719744] env[68217]: DEBUG nova.compute.manager [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 802.719744] env[68217]: DEBUG nova.network.neutron [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 802.774501] env[68217]: DEBUG oslo_vmware.api [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961159, 'name': PowerOffVM_Task, 'duration_secs': 0.203007} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.774501] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 802.774501] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 802.775550] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3a47791f-5434-4d8c-9a50-856bba4aa3b2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.778331] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 802.813368] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c5ddca-95dc-0fea-96e4-4a941141a512, 'name': SearchDatastore_Task, 'duration_secs': 0.014668} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.813368] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f0b7d88b-44f9-4e30-933f-2de2eff20479 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.822682] env[68217]: DEBUG nova.policy [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0939a9bd52d142818e49fbf0c576e4a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd093c295105c44cca8bd67bd514429d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 802.825907] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 802.825907] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]523f917f-3e52-da2d-a497-b6bb55ab5f41" [ 802.825907] env[68217]: _type = "Task" [ 802.825907] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.846606] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5289e2ba-ae5c-12b1-43d9-3ed156851b38, 'name': SearchDatastore_Task, 'duration_secs': 0.019773} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.846606] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523f917f-3e52-da2d-a497-b6bb55ab5f41, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.850202] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 802.850202] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] e550084b-84dd-4ae8-8667-2edb45b49e2b/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk. 
{{(pid=68217) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 802.851791] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2002cfcc-6259-41f5-96b2-fa6c70b06ec4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.854272] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 802.854680] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 802.855080] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Deleting the datastore file [datastore1] dfeeed37-8c84-4ecc-87ea-f4239f512fb1 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 802.855262] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a5e54346-a080-4dcc-84c6-b5d21cdc5e0f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.867511] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 802.867511] env[68217]: value = "task-2961162" [ 802.867511] env[68217]: _type = "Task" [ 802.867511] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.867907] env[68217]: DEBUG oslo_vmware.api [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for the task: (returnval){ [ 802.867907] env[68217]: value = "task-2961163" [ 802.867907] env[68217]: _type = "Task" [ 802.867907] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.882342] env[68217]: DEBUG oslo_vmware.api [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961163, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.887757] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961162, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 802.940182] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961160, 'name': Rename_Task, 'duration_secs': 0.185711} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 802.940357] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 802.940630] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2030e99-85a1-428e-9eb3-316c7266cdc5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.948134] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 802.948134] env[68217]: value = "task-2961164" [ 802.948134] env[68217]: _type = "Task" [ 802.948134] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 802.957514] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961164, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.084900] env[68217]: DEBUG oslo_vmware.api [None req-78ff8de6-92bb-4024-a485-58fe2987085d tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961158, 'name': SuspendVM_Task, 'duration_secs': 0.661395} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.085182] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-78ff8de6-92bb-4024-a485-58fe2987085d tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Suspended the VM {{(pid=68217) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 803.085415] env[68217]: DEBUG nova.compute.manager [None req-78ff8de6-92bb-4024-a485-58fe2987085d tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 803.086148] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831a8152-136b-44cf-b14a-c2f20ea30a6f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.166311] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68e8f091-fbd4-4b81-86b7-e5320f0540b2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.174835] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2e1da32-5d51-4a0a-af89-c569af35b8c8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.211172] env[68217]: DEBUG oslo_concurrency.lockutils [req-00e70a1e-62ce-48bf-982a-4555c148a71a req-0abf0ed2-6e79-401b-8580-9e06dc90313d service nova] Releasing lock "refresh_cache-e642c93b-ca48-4d23-9abb-ff243855d8d0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.211172] env[68217]: DEBUG oslo_concurrency.lockutils [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Acquired lock "refresh_cache-e642c93b-ca48-4d23-9abb-ff243855d8d0" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 803.211172] env[68217]: DEBUG nova.network.neutron [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 803.212753] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177ef1fb-812d-42ac-ab0e-044ded4e04d1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.220639] env[68217]: DEBUG nova.compute.manager [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 803.226337] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366707e9-fc5f-4195-88c5-b957fd66241c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.243299] env[68217]: DEBUG nova.compute.provider_tree [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 803.338993] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523f917f-3e52-da2d-a497-b6bb55ab5f41, 'name': SearchDatastore_Task, 'duration_secs': 0.026275} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.339346] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 803.339655] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] ba39e563-3e3a-40aa-815f-760f0f37a55d/ba39e563-3e3a-40aa-815f-760f0f37a55d.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 803.340010] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6046069d-b1aa-4327-9de0-3460475b3526 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.349308] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 803.349308] env[68217]: value = "task-2961165" [ 803.349308] env[68217]: _type = "Task" [ 803.349308] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.350152] env[68217]: DEBUG nova.network.neutron [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Successfully created port: 21f37b3b-0b0a-412e-8413-f3a1967f5c79 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 803.361167] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961165, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.384771] env[68217]: DEBUG oslo_vmware.api [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961163, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.386044] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961162, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.458996] env[68217]: DEBUG oslo_vmware.api [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961164, 'name': PowerOnVM_Task, 'duration_secs': 0.494157} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.459356] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 803.459522] env[68217]: INFO nova.compute.manager [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Took 8.46 seconds to spawn the instance on the hypervisor. 
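The repeated "Invoking <Object>.<Method>_Task", "Waiting for the task", "progress is N%" and "completed successfully" records above all trace oslo.vmware's task pattern: the driver invokes a SOAP method that returns a vCenter Task, then polls that task through the session until it reaches SUCCESS. A minimal sketch of that call/poll sequence (not a verbatim excerpt from Nova; session and vm_ref are placeholder names for an established oslo_vmware.api.VMwareAPISession and a VirtualMachine managed-object reference):

    from oslo_vmware import api

    def power_on(session: api.VMwareAPISession, vm_ref):
        # invoke_api() issues the SOAP request (here VirtualMachine.PowerOnVM_Task,
        # i.e. the "Invoking ... with opID=..." lines) and returns a Task reference.
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        # wait_for_task() polls the task, logging "progress is N%" until vCenter
        # reports SUCCESS ("completed successfully") or raises on task failure.
        return session.wait_for_task(task)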
[ 803.459701] env[68217]: DEBUG nova.compute.manager [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 803.460516] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b57cce70-724f-4bd7-8179-e6c45705c2ae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.747192] env[68217]: DEBUG nova.scheduler.client.report [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 803.781048] env[68217]: DEBUG nova.network.neutron [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 803.865711] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961165, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.881931] env[68217]: DEBUG oslo_vmware.api [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Task: {'id': task-2961163, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.639978} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 803.885419] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 803.885734] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 803.885900] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 803.886179] env[68217]: INFO nova.compute.manager [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Took 1.66 seconds to destroy the instance on the hypervisor. [ 803.886468] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 803.886784] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961162, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.887111] env[68217]: DEBUG nova.compute.manager [-] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 803.887306] env[68217]: DEBUG nova.network.neutron [-] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 803.979316] env[68217]: INFO nova.compute.manager [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Took 39.31 seconds to build instance.
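The 'Acquiring lock "..." by "..."', 'Lock "..." acquired ... :: waited N.NNNs' and '"released" ... :: held N.NNNs' records (lockutils.py:405, 410 and 424) come from oslo.concurrency's synchronized decorator; the quoted name is the decorated callable's qualified name, which is why nested helpers appear as, for example, ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance. A minimal sketch of the pattern (resize_claim here is a stand-in body, not Nova's implementation):

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def resize_claim():
        # Runs only while holding the "compute_resources" lock; entering and
        # leaving the wrapper emits the acquired/released DEBUG lines above.
        pass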
[ 804.151532] env[68217]: DEBUG nova.network.neutron [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Updating instance_info_cache with network_info: [{"id": "1301548f-a001-481f-8e30-4f1a5721d9a9", "address": "fa:16:3e:08:8e:d7", "network": {"id": "9ccb6b88-9f15-4ce6-9752-f8a3de1cfae9", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1788399600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0722d75c14c443c582966ee909ccfc2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1301548f-a0", "ovs_interfaceid": "1301548f-a001-481f-8e30-4f1a5721d9a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.240252] env[68217]: DEBUG nova.compute.manager [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 804.252363] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.043s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.254720] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.157s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 804.261327] env[68217]: INFO nova.compute.claims [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 804.278978] env[68217]: DEBUG nova.virt.hardware [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 804.279237] env[68217]: DEBUG nova.virt.hardware [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 804.279392] env[68217]: DEBUG nova.virt.hardware [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 804.279570] env[68217]: DEBUG nova.virt.hardware [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 804.279717] env[68217]: DEBUG nova.virt.hardware [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 804.280300] env[68217]: DEBUG nova.virt.hardware [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 804.280657] env[68217]: DEBUG nova.virt.hardware [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 804.280803] env[68217]: DEBUG nova.virt.hardware [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 804.281099] env[68217]: DEBUG nova.virt.hardware [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 804.281309] env[68217]: DEBUG nova.virt.hardware [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 804.281966] env[68217]: DEBUG nova.virt.hardware [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 804.282688] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a717014-bd0f-403b-a0a3-b679f1b0145a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.289269] env[68217]: INFO nova.scheduler.client.report [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Deleted allocations for instance 156ea1ad-6e52-4848-915d-7ba74c606e6e [ 804.296142] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d012b63b-2e40-48bd-be5c-4ae913eac358 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.364179] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961165, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.912901} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.365118] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] ba39e563-3e3a-40aa-815f-760f0f37a55d/ba39e563-3e3a-40aa-815f-760f0f37a55d.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 804.365277] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 804.365454] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ae59e250-baa0-4328-87d6-3843f8aa3fb2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.374573] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 804.374573] env[68217]: value = "task-2961166" [ 804.374573] env[68217]: _type = "Task" [ 804.374573] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.381523] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961162, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.0285} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.382443] env[68217]: INFO nova.virt.vmwareapi.ds_util [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] e550084b-84dd-4ae8-8667-2edb45b49e2b/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk. [ 804.382982] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae22a6e-f539-432f-9119-d5b2cdf01ce4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.388312] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961166, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.415137] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Reconfiguring VM instance instance-00000033 to attach disk [datastore2] e550084b-84dd-4ae8-8667-2edb45b49e2b/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 804.415471] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7446ad9-a8a3-41df-857c-806cb9a0e245 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.437267] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 804.437267] env[68217]: value = "task-2961167" [ 804.437267] env[68217]: _type = "Task" [ 804.437267] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.447378] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961167, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.482771] env[68217]: DEBUG oslo_concurrency.lockutils [None req-00604f54-3443-4029-84fc-72b4590a2dbb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "7a01c2c5-3108-4382-85c5-a5ea5e6e160c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 62.508s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.500323] env[68217]: DEBUG nova.compute.manager [req-69aa5448-c68c-4ebd-bbd0-2d9bd2790d35 req-d6df6ebf-69e3-4613-af6b-9c92ecc8700c service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Received event network-vif-deleted-683c092b-4729-4946-9f3a-b14200be8d7c {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 804.500456] env[68217]: INFO nova.compute.manager [req-69aa5448-c68c-4ebd-bbd0-2d9bd2790d35 req-d6df6ebf-69e3-4613-af6b-9c92ecc8700c service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Neutron deleted interface 683c092b-4729-4946-9f3a-b14200be8d7c; detaching it from the instance and deleting it from the info cache [ 804.500796] env[68217]: DEBUG nova.network.neutron [req-69aa5448-c68c-4ebd-bbd0-2d9bd2790d35 req-d6df6ebf-69e3-4613-af6b-9c92ecc8700c service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.655196] env[68217]: DEBUG oslo_concurrency.lockutils [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Releasing lock "refresh_cache-e642c93b-ca48-4d23-9abb-ff243855d8d0" {{(pid=68217) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 804.655635] env[68217]: DEBUG nova.compute.manager [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Instance network_info: |[{"id": "1301548f-a001-481f-8e30-4f1a5721d9a9", "address": "fa:16:3e:08:8e:d7", "network": {"id": "9ccb6b88-9f15-4ce6-9752-f8a3de1cfae9", "bridge": "br-int", "label": "tempest-ServerPasswordTestJSON-1788399600-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0722d75c14c443c582966ee909ccfc2c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1f996252-e329-42bd-a897-446dfe2b81cd", "external-id": "nsx-vlan-transportzone-535", "segmentation_id": 535, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1301548f-a0", "ovs_interfaceid": "1301548f-a001-481f-8e30-4f1a5721d9a9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 804.656395] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:8e:d7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1f996252-e329-42bd-a897-446dfe2b81cd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1301548f-a001-481f-8e30-4f1a5721d9a9', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 804.663648] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Creating folder: Project (0722d75c14c443c582966ee909ccfc2c). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 804.663931] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef07fed9-3c6c-4381-887c-2dd61d9be24d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.678029] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Created folder: Project (0722d75c14c443c582966ee909ccfc2c) in parent group-v594094. [ 804.678255] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Creating folder: Instances. Parent ref: group-v594255. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 804.678536] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4c174e23-bcb8-4f34-bf4e-a4634e5ec976 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.691157] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Created folder: Instances in parent group-v594255. [ 804.691432] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 804.691710] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 804.692290] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-062d609d-6a7c-4ba4-9fa6-c0317b407352 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.714320] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 804.714320] env[68217]: value = "task-2961170" [ 804.714320] env[68217]: _type = "Task" [ 804.714320] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.725271] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961170, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.805900] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9bc4d63c-a1fb-4048-9b3f-fc30f7bd02ac tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "156ea1ad-6e52-4848-915d-7ba74c606e6e" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 30.322s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 804.845693] env[68217]: DEBUG nova.network.neutron [-] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.885801] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961166, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.223107} completed successfully.
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.886137] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 804.886926] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a71888f-f012-495a-9982-8605c973c87a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.912014] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] ba39e563-3e3a-40aa-815f-760f0f37a55d/ba39e563-3e3a-40aa-815f-760f0f37a55d.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 804.913459] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-564debc2-2d71-40a2-bf64-ed93047651e1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.938703] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 804.938703] env[68217]: value = "task-2961171" [ 804.938703] env[68217]: _type = "Task" [ 804.938703] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 804.952892] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961167, 'name': ReconfigVM_Task, 'duration_secs': 0.338439} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.956105] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Reconfigured VM instance instance-00000033 to attach disk [datastore2] e550084b-84dd-4ae8-8667-2edb45b49e2b/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 804.956486] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961171, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.957203] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3be1b2fa-dbf7-4b39-b3ad-116b5b047bae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.982992] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c70f58c4-6777-4d3e-9f12-fb4b976c11a2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.993266] env[68217]: DEBUG nova.compute.manager [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 805.002116] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 805.002116] env[68217]: value = "task-2961172" [ 805.002116] env[68217]: _type = "Task" [ 805.002116] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.008081] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-de5282e4-a3bd-4a72-8579-174e94f32a8a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.016438] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961172, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.020272] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bce05cd-304e-46ab-9fcf-6df41fa72c72 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.058092] env[68217]: DEBUG nova.compute.manager [req-69aa5448-c68c-4ebd-bbd0-2d9bd2790d35 req-d6df6ebf-69e3-4613-af6b-9c92ecc8700c service nova] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Detach interface failed, port_id=683c092b-4729-4946-9f3a-b14200be8d7c, reason: Instance dfeeed37-8c84-4ecc-87ea-f4239f512fb1 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 805.224537] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961170, 'name': CreateVM_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.299623] env[68217]: DEBUG nova.network.neutron [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Successfully updated port: 21f37b3b-0b0a-412e-8413-f3a1967f5c79 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 805.349524] env[68217]: INFO nova.compute.manager [-] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Took 1.46 seconds to deallocate network for instance. 
[ 805.455211] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961171, 'name': ReconfigVM_Task, 'duration_secs': 0.309632} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.455495] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Reconfigured VM instance instance-00000037 to attach disk [datastore1] ba39e563-3e3a-40aa-815f-760f0f37a55d/ba39e563-3e3a-40aa-815f-760f0f37a55d.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 805.456244] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4603e92d-389d-4d32-ac9b-7f772182b860 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.466277] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 805.466277] env[68217]: value = "task-2961173" [ 805.466277] env[68217]: _type = "Task" [ 805.466277] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.480881] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961173, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.516558] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961172, 'name': ReconfigVM_Task, 'duration_secs': 0.168224} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.517632] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.517947] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 805.518336] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-087ccd56-ab19-4da1-b590-5743a8a84354 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.529373] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 805.529373] env[68217]: value = "task-2961174" [ 805.529373] env[68217]: _type = "Task" [ 805.529373] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.539554] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961174, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.650057] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "7a01c2c5-3108-4382-85c5-a5ea5e6e160c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.650343] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "7a01c2c5-3108-4382-85c5-a5ea5e6e160c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 805.709427] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d61057-ef37-417e-9794-abd7976b9b0b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.723348] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ba041f4-5278-4602-8176-2ffbff0c6a37 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.733600] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961170, 'name': CreateVM_Task, 'duration_secs': 0.875499} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.757324] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 805.758615] env[68217]: DEBUG oslo_concurrency.lockutils [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.758783] env[68217]: DEBUG oslo_concurrency.lockutils [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 805.759168] env[68217]: DEBUG oslo_concurrency.lockutils [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 805.759907] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d04f9d1-2c4e-48bd-a1d1-fef823bed0b5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.762479] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4af59423-1e10-4087-b507-f3de9be34907 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.767849] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Waiting for the task: (returnval){ [ 805.767849] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d110bd-05e7-5404-112e-babd766d8f51" [ 805.767849] env[68217]: _type = "Task" [ 805.767849] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 805.777024] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac8d7e4-7a8a-4a8d-9657-95fc261dbb3d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.783146] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d110bd-05e7-5404-112e-babd766d8f51, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 805.791300] env[68217]: DEBUG nova.compute.provider_tree [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.802037] env[68217]: DEBUG oslo_concurrency.lockutils [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "refresh_cache-a86015ea-fa6b-4cf8-9d79-273ffa02ec23" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 805.802260] env[68217]: DEBUG oslo_concurrency.lockutils [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "refresh_cache-a86015ea-fa6b-4cf8-9d79-273ffa02ec23" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 805.802471] env[68217]: DEBUG nova.network.neutron [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 805.856105] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 805.935342] env[68217]: DEBUG nova.compute.manager [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 805.936309] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc4ffd9f-189e-4af8-9bb6-437674830612 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.977689] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961173, 'name': Rename_Task, 'duration_secs': 0.21227} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 805.978787] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 805.979076] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79830b68-a072-4ed4-ac57-5aa4238528f6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.988439] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 805.988439] env[68217]: value = "task-2961175" [ 805.988439] env[68217]: _type = "Task" [ 805.988439] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.000462] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961175, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.039309] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961174, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.153232] env[68217]: DEBUG nova.compute.utils [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 806.280769] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d110bd-05e7-5404-112e-babd766d8f51, 'name': SearchDatastore_Task, 'duration_secs': 0.028003} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.281781] env[68217]: DEBUG oslo_concurrency.lockutils [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 806.282064] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 806.282343] env[68217]: DEBUG oslo_concurrency.lockutils [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.282691] env[68217]: DEBUG oslo_concurrency.lockutils [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 806.282917] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 806.283195] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c0dc091-35d7-4312-b30d-e039841b2519 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.294277] env[68217]: DEBUG nova.scheduler.client.report [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 806.297540] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 806.298170] env[68217]: DEBUG 
nova.virt.vmwareapi.vmops [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 806.298924] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7220c8b9-c7ee-47a0-a70a-20b9f6bcbbf7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.307818] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Waiting for the task: (returnval){ [ 806.307818] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5215fce3-f901-74b6-dd0a-2d4da55b51d4" [ 806.307818] env[68217]: _type = "Task" [ 806.307818] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.317590] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5215fce3-f901-74b6-dd0a-2d4da55b51d4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.347637] env[68217]: DEBUG nova.network.neutron [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 806.449053] env[68217]: INFO nova.compute.manager [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] instance snapshotting [ 806.449297] env[68217]: WARNING nova.compute.manager [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] trying to snapshot a non-running instance: (state: 7 expected: 1) [ 806.454536] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c393f0-17c5-475e-a9ba-1feb9951f519 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.475337] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261e163b-7957-4732-be06-694db19f5356 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.499040] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961175, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.538664] env[68217]: DEBUG oslo_vmware.api [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961174, 'name': PowerOnVM_Task, 'duration_secs': 0.514162} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.539529] env[68217]: DEBUG nova.network.neutron [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Updating instance_info_cache with network_info: [{"id": "21f37b3b-0b0a-412e-8413-f3a1967f5c79", "address": "fa:16:3e:d4:23:d4", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21f37b3b-0b", "ovs_interfaceid": "21f37b3b-0b0a-412e-8413-f3a1967f5c79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.540718] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 806.543369] env[68217]: DEBUG nova.compute.manager [None req-7548cfaa-3ea6-4dae-9889-f6b4aa538716 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 806.544172] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-349a36c6-ecad-44e7-8ab7-2961eb06a40d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.656794] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "7a01c2c5-3108-4382-85c5-a5ea5e6e160c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.800074] env[68217]: DEBUG oslo_concurrency.lockutils 
[None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.545s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.800601] env[68217]: DEBUG nova.compute.manager [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 806.803940] env[68217]: DEBUG oslo_concurrency.lockutils [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.591s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.806866] env[68217]: INFO nova.compute.claims [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 806.823306] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5215fce3-f901-74b6-dd0a-2d4da55b51d4, 'name': SearchDatastore_Task, 'duration_secs': 0.018406} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 806.824462] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0e4b9e35-50d5-48aa-b7fa-7dc2d7a867c7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.833149] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Waiting for the task: (returnval){ [ 806.833149] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d461ba-4dc2-e6c3-0d53-c3ddfff378d8" [ 806.833149] env[68217]: _type = "Task" [ 806.833149] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 806.842177] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d461ba-4dc2-e6c3-0d53-c3ddfff378d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 806.849020] env[68217]: DEBUG nova.compute.manager [req-5ea99a19-889e-4687-b382-2224d62ce925 req-1b5687f3-362a-43eb-ae57-83a61e780793 service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Received event network-vif-plugged-21f37b3b-0b0a-412e-8413-f3a1967f5c79 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 806.849343] env[68217]: DEBUG oslo_concurrency.lockutils [req-5ea99a19-889e-4687-b382-2224d62ce925 req-1b5687f3-362a-43eb-ae57-83a61e780793 service nova] Acquiring lock "a86015ea-fa6b-4cf8-9d79-273ffa02ec23-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 806.849756] env[68217]: DEBUG oslo_concurrency.lockutils [req-5ea99a19-889e-4687-b382-2224d62ce925 req-1b5687f3-362a-43eb-ae57-83a61e780793 service nova] Lock "a86015ea-fa6b-4cf8-9d79-273ffa02ec23-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 806.849951] env[68217]: DEBUG oslo_concurrency.lockutils [req-5ea99a19-889e-4687-b382-2224d62ce925 req-1b5687f3-362a-43eb-ae57-83a61e780793 service nova] Lock "a86015ea-fa6b-4cf8-9d79-273ffa02ec23-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 806.850184] env[68217]: DEBUG nova.compute.manager [req-5ea99a19-889e-4687-b382-2224d62ce925 req-1b5687f3-362a-43eb-ae57-83a61e780793 service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] No waiting events found dispatching network-vif-plugged-21f37b3b-0b0a-412e-8413-f3a1967f5c79 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 806.850360] env[68217]: WARNING nova.compute.manager [req-5ea99a19-889e-4687-b382-2224d62ce925 req-1b5687f3-362a-43eb-ae57-83a61e780793 service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Received unexpected event network-vif-plugged-21f37b3b-0b0a-412e-8413-f3a1967f5c79 for instance with vm_state building and task_state spawning. [ 806.850513] env[68217]: DEBUG nova.compute.manager [req-5ea99a19-889e-4687-b382-2224d62ce925 req-1b5687f3-362a-43eb-ae57-83a61e780793 service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Received event network-changed-21f37b3b-0b0a-412e-8413-f3a1967f5c79 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 806.850666] env[68217]: DEBUG nova.compute.manager [req-5ea99a19-889e-4687-b382-2224d62ce925 req-1b5687f3-362a-43eb-ae57-83a61e780793 service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Refreshing instance network info cache due to event network-changed-21f37b3b-0b0a-412e-8413-f3a1967f5c79. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 806.850831] env[68217]: DEBUG oslo_concurrency.lockutils [req-5ea99a19-889e-4687-b382-2224d62ce925 req-1b5687f3-362a-43eb-ae57-83a61e780793 service nova] Acquiring lock "refresh_cache-a86015ea-fa6b-4cf8-9d79-273ffa02ec23" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 806.987048] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 806.988061] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0a040463-4d50-4ade-9cef-e5f7fe1dd791 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.000914] env[68217]: DEBUG oslo_vmware.api [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961175, 'name': PowerOnVM_Task, 'duration_secs': 0.846103} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.002425] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 807.002637] env[68217]: INFO nova.compute.manager [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Took 9.50 seconds to spawn the instance on the hypervisor. [ 807.002815] env[68217]: DEBUG nova.compute.manager [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 807.003171] env[68217]: DEBUG oslo_vmware.api [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 807.003171] env[68217]: value = "task-2961176" [ 807.003171] env[68217]: _type = "Task" [ 807.003171] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.003875] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-034fbd4e-879e-4e8d-9f03-4e282bcdb957 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.016714] env[68217]: DEBUG oslo_vmware.api [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961176, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.044140] env[68217]: DEBUG oslo_concurrency.lockutils [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "refresh_cache-a86015ea-fa6b-4cf8-9d79-273ffa02ec23" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 807.044582] env[68217]: DEBUG nova.compute.manager [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Instance network_info: |[{"id": "21f37b3b-0b0a-412e-8413-f3a1967f5c79", "address": "fa:16:3e:d4:23:d4", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21f37b3b-0b", "ovs_interfaceid": "21f37b3b-0b0a-412e-8413-f3a1967f5c79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 807.044810] env[68217]: DEBUG oslo_concurrency.lockutils [req-5ea99a19-889e-4687-b382-2224d62ce925 req-1b5687f3-362a-43eb-ae57-83a61e780793 service nova] Acquired lock "refresh_cache-a86015ea-fa6b-4cf8-9d79-273ffa02ec23" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 807.044986] env[68217]: DEBUG nova.network.neutron [req-5ea99a19-889e-4687-b382-2224d62ce925 req-1b5687f3-362a-43eb-ae57-83a61e780793 service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Refreshing network info cache for port 21f37b3b-0b0a-412e-8413-f3a1967f5c79 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 807.046275] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d4:23:d4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '21f37b3b-0b0a-412e-8413-f3a1967f5c79', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 807.053980] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 
tempest-ServerActionsTestOtherA-393099776-project-member] Creating folder: Project (d093c295105c44cca8bd67bd514429d1). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 807.057262] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5375b858-3940-4f0b-8dd4-9051fb6cb44a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.075964] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Created folder: Project (d093c295105c44cca8bd67bd514429d1) in parent group-v594094. [ 807.076295] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Creating folder: Instances. Parent ref: group-v594258. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 807.077131] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6be5951a-0cc7-4192-a5b7-a1234a830cb7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.089147] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Created folder: Instances in parent group-v594258. [ 807.089441] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 807.089648] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 807.089864] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-480d6f6f-668f-45ba-a64d-e5ea66e23eb1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.115079] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 807.115079] env[68217]: value = "task-2961179" [ 807.115079] env[68217]: _type = "Task" [ 807.115079] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.123410] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961179, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.314045] env[68217]: DEBUG nova.compute.utils [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 807.318922] env[68217]: DEBUG nova.compute.manager [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 807.318922] env[68217]: DEBUG nova.network.neutron [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 807.347155] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d461ba-4dc2-e6c3-0d53-c3ddfff378d8, 'name': SearchDatastore_Task, 'duration_secs': 0.017286} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.349776] env[68217]: DEBUG oslo_concurrency.lockutils [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 807.350329] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] e642c93b-ca48-4d23-9abb-ff243855d8d0/e642c93b-ca48-4d23-9abb-ff243855d8d0.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 807.350935] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-57b66034-5bd3-417f-a45e-d2b0762e0efe {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.362210] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Waiting for the task: (returnval){ [ 807.362210] env[68217]: value = "task-2961180" [ 807.362210] env[68217]: _type = "Task" [ 807.362210] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.374674] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': task-2961180, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.443536] env[68217]: DEBUG nova.network.neutron [req-5ea99a19-889e-4687-b382-2224d62ce925 req-1b5687f3-362a-43eb-ae57-83a61e780793 service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Updated VIF entry in instance network info cache for port 21f37b3b-0b0a-412e-8413-f3a1967f5c79. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 807.443950] env[68217]: DEBUG nova.network.neutron [req-5ea99a19-889e-4687-b382-2224d62ce925 req-1b5687f3-362a-43eb-ae57-83a61e780793 service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Updating instance_info_cache with network_info: [{"id": "21f37b3b-0b0a-412e-8413-f3a1967f5c79", "address": "fa:16:3e:d4:23:d4", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21f37b3b-0b", "ovs_interfaceid": "21f37b3b-0b0a-412e-8413-f3a1967f5c79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.475584] env[68217]: DEBUG nova.policy [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f38774f9019a474c9f792e39802d969d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '118adafdba384a499099a282bdaac85e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 807.520060] env[68217]: DEBUG oslo_vmware.api [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961176, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.533948] env[68217]: INFO nova.compute.manager [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Took 38.25 seconds to build instance. [ 807.624823] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961179, 'name': CreateVM_Task, 'duration_secs': 0.392544} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.625031] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 807.625763] env[68217]: DEBUG oslo_concurrency.lockutils [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.625944] env[68217]: DEBUG oslo_concurrency.lockutils [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 807.626279] env[68217]: DEBUG oslo_concurrency.lockutils [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 807.626547] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25e83ac3-08f5-4b49-b108-3fc1cd2ba171 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.634823] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 807.634823] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]523fe61a-79b0-0575-320a-6ec325c5830a" [ 807.634823] env[68217]: _type = "Task" [ 807.634823] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.644545] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523fe61a-79b0-0575-320a-6ec325c5830a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.733508] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "7a01c2c5-3108-4382-85c5-a5ea5e6e160c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 807.733775] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "7a01c2c5-3108-4382-85c5-a5ea5e6e160c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 807.734038] env[68217]: INFO nova.compute.manager [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Attaching volume d5de6532-0a92-4a7a-b66b-09e2c100b998 to /dev/sdb [ 807.775845] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dc8ed20-2916-4141-8193-e87ef5f80d0f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.786147] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-303e1423-cb78-40a5-976d-4132dfecb39a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.805300] env[68217]: DEBUG nova.virt.block_device [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Updating existing volume attachment record: 49d2044d-27be-4b09-8041-90ec4389f457 {{(pid=68217) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 807.817671] env[68217]: DEBUG nova.compute.manager [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 807.874480] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': task-2961180, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.946539] env[68217]: DEBUG oslo_concurrency.lockutils [req-5ea99a19-889e-4687-b382-2224d62ce925 req-1b5687f3-362a-43eb-ae57-83a61e780793 service nova] Releasing lock "refresh_cache-a86015ea-fa6b-4cf8-9d79-273ffa02ec23" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.017320] env[68217]: DEBUG oslo_vmware.api [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961176, 'name': CreateSnapshot_Task, 'duration_secs': 0.814968} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.017741] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 808.020934] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f5cc11a-8544-4073-acde-fccd172944f7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.037792] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0a8ed46d-472c-4268-a93e-34794b4b2a87 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "ba39e563-3e3a-40aa-815f-760f0f37a55d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 65.218s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 808.153807] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523fe61a-79b0-0575-320a-6ec325c5830a, 'name': SearchDatastore_Task, 'duration_secs': 0.058025} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.154128] env[68217]: DEBUG oslo_concurrency.lockutils [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 808.154389] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 808.154633] env[68217]: DEBUG oslo_concurrency.lockutils [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.154778] env[68217]: DEBUG oslo_concurrency.lockutils [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 808.154984] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 808.158422] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a0fe60fd-3f15-492b-a60e-6fefa724ba33 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.168605] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 808.168910] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 808.169635] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2add7a09-8c51-4b47-8a3c-bbe6a96caba0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.182067] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 808.182067] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5226cbb9-99cb-b63a-46f6-131059b1ac4e" [ 808.182067] env[68217]: _type = "Task" [ 808.182067] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.194643] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5226cbb9-99cb-b63a-46f6-131059b1ac4e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.198170] env[68217]: DEBUG nova.network.neutron [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Successfully created port: 96bf6c1f-33b5-4589-b488-c5be8d5892c6 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 808.288029] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a30ab6b2-9b02-4def-8b53-6094fef4e1df {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.296568] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd442874-d847-4228-85a9-f26fda4dfe13 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.341679] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96268e4d-261f-483f-a3e2-5829d1a5b992 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.352999] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58526721-7915-4426-bbf7-4de737825279 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.369987] env[68217]: DEBUG nova.compute.provider_tree [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.382939] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': task-2961180, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.55235} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.382939] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] e642c93b-ca48-4d23-9abb-ff243855d8d0/e642c93b-ca48-4d23-9abb-ff243855d8d0.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 808.382939] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 808.383136] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b72cc7ae-f419-4959-9b63-d65fe375c620 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.393204] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Waiting for the task: (returnval){ [ 808.393204] env[68217]: value = "task-2961184" [ 808.393204] env[68217]: _type = "Task" [ 808.393204] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.408307] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': task-2961184, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.542806] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 808.543331] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-3cb24547-af12-47f5-8831-0fa80f962304 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.553027] env[68217]: DEBUG oslo_vmware.api [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 808.553027] env[68217]: value = "task-2961185" [ 808.553027] env[68217]: _type = "Task" [ 808.553027] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.561972] env[68217]: DEBUG oslo_vmware.api [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961185, 'name': CloneVM_Task} progress is 0%. 
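The target of "Extending root virtual disk to 1048576" is the flavor's root disk expressed in KB, the unit the ExtendVirtualDisk call takes: m1.nano has root_gb=1, and 1 GB is 1 * 1024 * 1024 = 1048576 KB. A trivial sketch of that conversion (the helper name is made up):

    def root_gb_to_kb(root_gb: int) -> int:
        # The extend task expects the new capacity in kilobytes, so a 1 GB
        # root disk becomes 1 * 1024 * 1024 = 1048576 KB, matching the log.
        return root_gb * 1024 * 1024

    assert root_gb_to_kb(1) == 1048576
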
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.695045] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5226cbb9-99cb-b63a-46f6-131059b1ac4e, 'name': SearchDatastore_Task, 'duration_secs': 0.011975} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.696295] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e68865dc-34ba-4663-bf07-ad6a1ffb4413 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.704442] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 808.704442] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]526d4922-586b-ff58-ff95-2c1c144e3568" [ 808.704442] env[68217]: _type = "Task" [ 808.704442] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.714976] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526d4922-586b-ff58-ff95-2c1c144e3568, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.749878] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "09290e60-7751-408e-9d6d-20e7cb61767b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 808.749878] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "09290e60-7751-408e-9d6d-20e7cb61767b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 808.842770] env[68217]: DEBUG nova.compute.manager [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 808.866884] env[68217]: DEBUG nova.virt.hardware [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 808.867343] env[68217]: DEBUG nova.virt.hardware [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 808.867614] env[68217]: DEBUG nova.virt.hardware [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 808.867889] env[68217]: DEBUG nova.virt.hardware [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 808.868151] env[68217]: DEBUG nova.virt.hardware [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 808.868415] env[68217]: DEBUG nova.virt.hardware [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 808.868780] env[68217]: DEBUG nova.virt.hardware [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 808.869042] env[68217]: DEBUG nova.virt.hardware [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 808.869340] 
env[68217]: DEBUG nova.virt.hardware [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 808.869637] env[68217]: DEBUG nova.virt.hardware [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 808.870033] env[68217]: DEBUG nova.virt.hardware [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 808.871025] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e8d7af5-ef08-4230-aa57-e836c98deb38 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.880319] env[68217]: DEBUG nova.scheduler.client.report [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 808.886069] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb42fad-32ac-4a02-af0c-607d46f6e24e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.912904] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': task-2961184, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076227} completed successfully. 
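The topology lines above ("Build topologies for 1 vcpu(s) 1:1:1", "Got 1 possible topologies") enumerate sockets/cores/threads triples whose product equals the flavor's vCPU count, capped by the 65536 per-dimension limits. A simplified sketch of that enumeration, not Nova's actual implementation:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                            max_threads=65536):
        # Yield every (sockets, cores, threads) triple whose product is exactly
        # the vCPU count and which respects the per-dimension maxima; for a
        # 1-vCPU flavor the only candidate is (1, 1, 1), as logged above.
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        yield sockets, cores, threads

    print(list(possible_topologies(1)))  # [(1, 1, 1)]
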
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 808.913572] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 808.914456] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-462a903e-f100-47cf-8780-9806b06efac6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.940537] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Reconfiguring VM instance instance-00000038 to attach disk [datastore1] e642c93b-ca48-4d23-9abb-ff243855d8d0/e642c93b-ca48-4d23-9abb-ff243855d8d0.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 808.942100] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e5dc5fd-de97-49b7-893d-a83295d65b34 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.967681] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Waiting for the task: (returnval){ [ 808.967681] env[68217]: value = "task-2961186" [ 808.967681] env[68217]: _type = "Task" [ 808.967681] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 808.978731] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': task-2961186, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.063788] env[68217]: DEBUG oslo_vmware.api [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961185, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.215685] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526d4922-586b-ff58-ff95-2c1c144e3568, 'name': SearchDatastore_Task, 'duration_secs': 0.018373} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.215955] env[68217]: DEBUG oslo_concurrency.lockutils [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 809.216237] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] a86015ea-fa6b-4cf8-9d79-273ffa02ec23/a86015ea-fa6b-4cf8-9d79-273ffa02ec23.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 809.216494] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-df69d471-0f3a-4b53-b32e-3c8f4b6e2d0a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.224736] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 809.224736] env[68217]: value = "task-2961187" [ 809.224736] env[68217]: _type = "Task" [ 809.224736] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 809.233213] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961187, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.251384] env[68217]: DEBUG nova.compute.manager [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 809.394026] env[68217]: DEBUG oslo_concurrency.lockutils [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.588s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.394026] env[68217]: DEBUG nova.compute.manager [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 809.399489] env[68217]: DEBUG oslo_concurrency.lockutils [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.917s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.399923] env[68217]: DEBUG nova.objects.instance [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lazy-loading 'resources' on Instance uuid 693d6a74-a671-4d02-8798-cd3975507428 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 809.463852] env[68217]: INFO nova.compute.manager [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Rebuilding instance [ 809.484155] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': task-2961186, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.515458] env[68217]: DEBUG nova.compute.manager [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 809.516550] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bac4afdd-2178-402f-99e8-c842905f0fca {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.566326] env[68217]: DEBUG oslo_vmware.api [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961185, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.738147] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961187, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 809.779138] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.902899] env[68217]: DEBUG nova.compute.utils [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 809.907108] env[68217]: DEBUG nova.compute.manager [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 809.907317] env[68217]: DEBUG nova.network.neutron [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 809.979095] env[68217]: DEBUG nova.compute.manager [req-6f819568-5a26-4363-9ab5-ede49ff1c6e3 req-c987caea-69d8-4355-8cdf-5538da21d3c7 service nova] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Received event network-vif-plugged-96bf6c1f-33b5-4589-b488-c5be8d5892c6 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 809.979268] env[68217]: DEBUG oslo_concurrency.lockutils [req-6f819568-5a26-4363-9ab5-ede49ff1c6e3 req-c987caea-69d8-4355-8cdf-5538da21d3c7 service nova] Acquiring lock "23366029-e754-49dc-ba56-7a0d92232d81-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 809.979480] env[68217]: DEBUG oslo_concurrency.lockutils [req-6f819568-5a26-4363-9ab5-ede49ff1c6e3 req-c987caea-69d8-4355-8cdf-5538da21d3c7 service nova] Lock "23366029-e754-49dc-ba56-7a0d92232d81-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 809.979645] env[68217]: DEBUG oslo_concurrency.lockutils [req-6f819568-5a26-4363-9ab5-ede49ff1c6e3 req-c987caea-69d8-4355-8cdf-5538da21d3c7 service nova] Lock "23366029-e754-49dc-ba56-7a0d92232d81-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 809.979814] env[68217]: DEBUG nova.compute.manager [req-6f819568-5a26-4363-9ab5-ede49ff1c6e3 req-c987caea-69d8-4355-8cdf-5538da21d3c7 service nova] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] No waiting events found dispatching network-vif-plugged-96bf6c1f-33b5-4589-b488-c5be8d5892c6 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 809.979981] env[68217]: WARNING 
nova.compute.manager [req-6f819568-5a26-4363-9ab5-ede49ff1c6e3 req-c987caea-69d8-4355-8cdf-5538da21d3c7 service nova] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Received unexpected event network-vif-plugged-96bf6c1f-33b5-4589-b488-c5be8d5892c6 for instance with vm_state building and task_state spawning. [ 809.984699] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': task-2961186, 'name': ReconfigVM_Task, 'duration_secs': 0.677604} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 809.984968] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Reconfigured VM instance instance-00000038 to attach disk [datastore1] e642c93b-ca48-4d23-9abb-ff243855d8d0/e642c93b-ca48-4d23-9abb-ff243855d8d0.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 809.985582] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5e304a27-ffcb-459e-90b8-0f9e81ca1483 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.996209] env[68217]: DEBUG nova.policy [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f38774f9019a474c9f792e39802d969d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '118adafdba384a499099a282bdaac85e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 810.003026] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Waiting for the task: (returnval){ [ 810.003026] env[68217]: value = "task-2961188" [ 810.003026] env[68217]: _type = "Task" [ 810.003026] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.010072] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': task-2961188, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.068131] env[68217]: DEBUG oslo_vmware.api [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961185, 'name': CloneVM_Task} progress is 100%. 
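The "Received event network-vif-plugged ... / No waiting events found ... / Received unexpected event" sequence is the server-external-events flow: Neutron tells Nova the VIF has been wired up, and because nothing in the driver was blocked waiting for that event yet (the instance is still spawning), the notification is simply logged. A rough sketch of the call made on Neutron's side; the endpoint, token and helper name are placeholders, while the port and instance IDs are the ones from the log:

    import requests

    def notify_vif_plugged(nova_endpoint, token, port_id, server_uuid):
        # POST to Nova's os-server-external-events API; Nova routes the event to
        # the compute host owning the instance, which either wakes a waiter or,
        # as in the WARNING above, just records that the event arrived early.
        payload = {"events": [{
            "name": "network-vif-plugged",
            "tag": port_id,
            "server_uuid": server_uuid,
        }]}
        return requests.post(
            nova_endpoint + "/os-server-external-events",
            json=payload,
            headers={"X-Auth-Token": token},
            timeout=10,
        )

    notify_vif_plugged("http://nova.example.org/v2.1", "placeholder-token",
                       "96bf6c1f-33b5-4589-b488-c5be8d5892c6",
                       "23366029-e754-49dc-ba56-7a0d92232d81")
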
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.237249] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961187, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.536543} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.237498] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] a86015ea-fa6b-4cf8-9d79-273ffa02ec23/a86015ea-fa6b-4cf8-9d79-273ffa02ec23.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 810.237708] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 810.237993] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-391721b7-aced-421c-b937-afd6e30919b3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.248237] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 810.248237] env[68217]: value = "task-2961190" [ 810.248237] env[68217]: _type = "Task" [ 810.248237] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.260125] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961190, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.309826] env[68217]: DEBUG nova.network.neutron [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Successfully updated port: 96bf6c1f-33b5-4589-b488-c5be8d5892c6 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 810.343407] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-361c208c-1b77-46aa-8798-93bbf396e4c0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.349497] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4e99ce-a070-4040-b074-c18d62476978 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.384578] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a639ca15-d2bb-4e5d-a583-74f91a786350 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.393214] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf49769-eb79-4504-a69e-5a261c79697f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.407854] env[68217]: DEBUG nova.compute.provider_tree [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.410636] env[68217]: DEBUG nova.compute.manager [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 810.511771] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': task-2961188, 'name': Rename_Task, 'duration_secs': 0.338662} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.512216] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 810.512803] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-25ab9394-c4da-42a0-97c7-bff60f0efb48 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.520706] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Waiting for the task: (returnval){ [ 810.520706] env[68217]: value = "task-2961191" [ 810.520706] env[68217]: _type = "Task" [ 810.520706] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.530235] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': task-2961191, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.534751] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 810.534751] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c1e7ba86-8818-4f2a-b4eb-59fe2df7bfd3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.542027] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 810.542027] env[68217]: value = "task-2961192" [ 810.542027] env[68217]: _type = "Task" [ 810.542027] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.550555] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961192, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.557217] env[68217]: DEBUG nova.network.neutron [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Successfully created port: cdcfcb29-1a21-4c5f-a612-ab70b34a44b1 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 810.569420] env[68217]: DEBUG oslo_vmware.api [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961185, 'name': CloneVM_Task, 'duration_secs': 1.56845} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.570097] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Created linked-clone VM from snapshot [ 810.571050] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f028aa9-cb8a-4819-b5cb-693ea0b40924 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.582558] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Uploading image 834e3301-26b6-432c-8685-944e0d978890 {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 810.611369] env[68217]: DEBUG oslo_vmware.rw_handles [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 810.611369] env[68217]: value = "vm-594264" [ 810.611369] env[68217]: _type = "VirtualMachine" [ 810.611369] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 810.611717] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-acd9b7c0-b3c4-47f1-9cdc-fa07189c07ea {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.623594] env[68217]: DEBUG oslo_vmware.rw_handles [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lease: (returnval){ [ 810.623594] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]525bdbc9-0434-972f-126e-bef5b61e9654" [ 810.623594] env[68217]: _type = "HttpNfcLease" [ 810.623594] env[68217]: } obtained for exporting VM: (result){ [ 810.623594] env[68217]: value = "vm-594264" [ 810.623594] env[68217]: _type = "VirtualMachine" [ 810.623594] env[68217]: }. 
{{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 810.623971] env[68217]: DEBUG oslo_vmware.api [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the lease: (returnval){ [ 810.623971] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]525bdbc9-0434-972f-126e-bef5b61e9654" [ 810.623971] env[68217]: _type = "HttpNfcLease" [ 810.623971] env[68217]: } to be ready. {{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 810.632133] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 810.632133] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]525bdbc9-0434-972f-126e-bef5b61e9654" [ 810.632133] env[68217]: _type = "HttpNfcLease" [ 810.632133] env[68217]: } is initializing. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 810.759119] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961190, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.271058} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 810.759397] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 810.760205] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d53f4dd-31c4-48f9-897d-3406f3b729b9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.785539] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Reconfiguring VM instance instance-00000039 to attach disk [datastore1] a86015ea-fa6b-4cf8-9d79-273ffa02ec23/a86015ea-fa6b-4cf8-9d79-273ffa02ec23.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 810.785857] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4b5fe350-7216-4a45-8398-fafba8f85b22 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.807768] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 810.807768] env[68217]: value = "task-2961194" [ 810.807768] env[68217]: _type = "Task" [ 810.807768] env[68217]: } to complete. 
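The HttpNfcLease entries here (and the "Found VMDK URL" / "Opening URL" lines that follow) are the export half of the snapshot upload: ExportVm hands back a lease, and once the lease leaves "initializing" and becomes ready, its info object lists the device URLs to stream the disk from. A condensed sketch, assuming an existing oslo.vmware session:

    from oslo_vmware import vim_util

    def export_vmdk_urls(session, vm_ref):
        # ExportVm returns an HttpNfcLease; wait until it is "ready", then read
        # its 'info' property, whose deviceUrl entries point at the ESX host's
        # /nfc/<lease-id>/disk-N.vmdk files that the upload reads from.
        lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
        session.wait_for_lease_ready(lease)
        lease_info = session.invoke_api(
            vim_util, 'get_object_property', session.vim, lease, 'info')
        return [dev.url for dev in lease_info.deviceUrl]
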
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 810.813334] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquiring lock "refresh_cache-23366029-e754-49dc-ba56-7a0d92232d81" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.813544] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquired lock "refresh_cache-23366029-e754-49dc-ba56-7a0d92232d81" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 810.813636] env[68217]: DEBUG nova.network.neutron [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 810.822533] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961194, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 810.914621] env[68217]: DEBUG nova.scheduler.client.report [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 811.032143] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': task-2961191, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.051958] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961192, 'name': PowerOffVM_Task, 'duration_secs': 0.419006} completed successfully. 
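The inventory dictionaries reported to Placement above translate into schedulable capacity with the usual formula (total - reserved) * allocation_ratio, so this node advertises 48 * 4.0 = 192 VCPU, (196590 - 512) * 1.0 = 196078 MB of RAM and 400 GB of disk. Worked out directly:

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        # Capacity seen by the scheduler: usable amount scaled by the ratio.
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)  # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
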
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.052766] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 811.053128] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 811.054038] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4dda562-74ec-4e5f-9cc6-707da67609f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.062751] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 811.062992] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d774d4c1-6b65-40a8-a872-30c0e2622148 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.132595] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 811.132595] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]525bdbc9-0434-972f-126e-bef5b61e9654" [ 811.132595] env[68217]: _type = "HttpNfcLease" [ 811.132595] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 811.132962] env[68217]: DEBUG oslo_vmware.rw_handles [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 811.132962] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]525bdbc9-0434-972f-126e-bef5b61e9654" [ 811.132962] env[68217]: _type = "HttpNfcLease" [ 811.132962] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 811.133673] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a4d730-18e6-4a37-948e-79cb89afc049 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.142365] env[68217]: DEBUG oslo_vmware.rw_handles [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523b7767-9076-50f2-a6cb-64b7320bd780/disk-0.vmdk from lease info. 
{{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 811.142365] env[68217]: DEBUG oslo_vmware.rw_handles [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523b7767-9076-50f2-a6cb-64b7320bd780/disk-0.vmdk for reading. {{(pid=68217) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 811.205699] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 811.205911] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 811.206097] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleting the datastore file [datastore1] ba39e563-3e3a-40aa-815f-760f0f37a55d {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 811.206369] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e457c83-5ca3-4267-a20e-86344146f9bc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.215908] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 811.215908] env[68217]: value = "task-2961196" [ 811.215908] env[68217]: _type = "Task" [ 811.215908] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.224955] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961196, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.324234] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-05a4193c-02d8-4be5-8ff5-8eb46f90e7e3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.331743] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961194, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.357551] env[68217]: DEBUG nova.network.neutron [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 811.423453] env[68217]: DEBUG oslo_concurrency.lockutils [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.024s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 811.427874] env[68217]: DEBUG nova.compute.manager [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 811.433397] env[68217]: DEBUG oslo_concurrency.lockutils [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.257s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 811.433397] env[68217]: INFO nova.compute.claims [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 811.450652] env[68217]: INFO nova.scheduler.client.report [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Deleted allocations for instance 693d6a74-a671-4d02-8798-cd3975507428 [ 811.466786] env[68217]: DEBUG nova.virt.hardware [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 811.467028] env[68217]: DEBUG nova.virt.hardware [None req-107a63ed-2c95-4f93-a210-d9d865df3051 
tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 811.467191] env[68217]: DEBUG nova.virt.hardware [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 811.467378] env[68217]: DEBUG nova.virt.hardware [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 811.467521] env[68217]: DEBUG nova.virt.hardware [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 811.467664] env[68217]: DEBUG nova.virt.hardware [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 811.467868] env[68217]: DEBUG nova.virt.hardware [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 811.468043] env[68217]: DEBUG nova.virt.hardware [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 811.468217] env[68217]: DEBUG nova.virt.hardware [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 811.468378] env[68217]: DEBUG nova.virt.hardware [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 811.468559] env[68217]: DEBUG nova.virt.hardware [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 811.469426] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd663b6-0266-4cda-8d3a-dec0d897ff7a {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.480934] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6a7efeb-0008-4050-b3e6-b96d3143ca72 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.531161] env[68217]: DEBUG oslo_vmware.api [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': task-2961191, 'name': PowerOnVM_Task, 'duration_secs': 0.869998} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.531605] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 811.531658] env[68217]: INFO nova.compute.manager [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Took 11.49 seconds to spawn the instance on the hypervisor. [ 811.531828] env[68217]: DEBUG nova.compute.manager [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 811.532662] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f26cd386-d09c-45da-aec4-73dd3642c992 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.605232] env[68217]: DEBUG nova.network.neutron [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Updating instance_info_cache with network_info: [{"id": "96bf6c1f-33b5-4589-b488-c5be8d5892c6", "address": "fa:16:3e:b6:b7:18", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96bf6c1f-33", "ovs_interfaceid": "96bf6c1f-33b5-4589-b488-c5be8d5892c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 811.731022] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961196, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.166815} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.731022] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 811.731022] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 811.731022] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 811.821041] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961194, 'name': ReconfigVM_Task, 'duration_secs': 0.569006} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 811.821805] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Reconfigured VM instance instance-00000039 to attach disk [datastore1] a86015ea-fa6b-4cf8-9d79-273ffa02ec23/a86015ea-fa6b-4cf8-9d79-273ffa02ec23.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 811.822479] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a00c26f7-b7eb-40b7-98d4-f5d31e21b5b5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.831119] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 811.831119] env[68217]: value = "task-2961197" [ 811.831119] env[68217]: _type = "Task" [ 811.831119] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 811.846069] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961197, 'name': Rename_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 811.965377] env[68217]: DEBUG oslo_concurrency.lockutils [None req-216278dd-346f-4f83-a6af-81678a00225b tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "693d6a74-a671-4d02-8798-cd3975507428" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 26.650s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 812.054773] env[68217]: INFO nova.compute.manager [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Took 38.32 seconds to build instance. [ 812.112019] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Releasing lock "refresh_cache-23366029-e754-49dc-ba56-7a0d92232d81" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 812.112019] env[68217]: DEBUG nova.compute.manager [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Instance network_info: |[{"id": "96bf6c1f-33b5-4589-b488-c5be8d5892c6", "address": "fa:16:3e:b6:b7:18", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96bf6c1f-33", "ovs_interfaceid": "96bf6c1f-33b5-4589-b488-c5be8d5892c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 812.112213] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b6:b7:18', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '96bf6c1f-33b5-4589-b488-c5be8d5892c6', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 812.118917] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 
tempest-ListImageFiltersTestJSON-2008574723-project-member] Creating folder: Project (118adafdba384a499099a282bdaac85e). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 812.120487] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ec429365-fb57-44e1-aa18-cb7c823c1855 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.137261] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Created folder: Project (118adafdba384a499099a282bdaac85e) in parent group-v594094. [ 812.137261] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Creating folder: Instances. Parent ref: group-v594265. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 812.137261] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-58db5e06-ef67-46b3-84d6-14152894a58b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.149667] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Created folder: Instances in parent group-v594265. [ 812.149923] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 812.150249] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 812.150519] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-389a5696-c962-4402-8efd-93386086e0b1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.174054] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 812.174054] env[68217]: value = "task-2961200" [ 812.174054] env[68217]: _type = "Task" [ 812.174054] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.182588] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961200, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.261539] env[68217]: DEBUG nova.compute.manager [req-2739e488-4a01-48fb-bb48-0346a8ffc582 req-a49dc639-0826-45b5-b2e0-4475cdd5cb2c service nova] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Received event network-changed-96bf6c1f-33b5-4589-b488-c5be8d5892c6 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 812.261988] env[68217]: DEBUG nova.compute.manager [req-2739e488-4a01-48fb-bb48-0346a8ffc582 req-a49dc639-0826-45b5-b2e0-4475cdd5cb2c service nova] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Refreshing instance network info cache due to event network-changed-96bf6c1f-33b5-4589-b488-c5be8d5892c6. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 812.262332] env[68217]: DEBUG oslo_concurrency.lockutils [req-2739e488-4a01-48fb-bb48-0346a8ffc582 req-a49dc639-0826-45b5-b2e0-4475cdd5cb2c service nova] Acquiring lock "refresh_cache-23366029-e754-49dc-ba56-7a0d92232d81" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 812.262777] env[68217]: DEBUG oslo_concurrency.lockutils [req-2739e488-4a01-48fb-bb48-0346a8ffc582 req-a49dc639-0826-45b5-b2e0-4475cdd5cb2c service nova] Acquired lock "refresh_cache-23366029-e754-49dc-ba56-7a0d92232d81" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 812.262907] env[68217]: DEBUG nova.network.neutron [req-2739e488-4a01-48fb-bb48-0346a8ffc582 req-a49dc639-0826-45b5-b2e0-4475cdd5cb2c service nova] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Refreshing network info cache for port 96bf6c1f-33b5-4589-b488-c5be8d5892c6 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 812.344504] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961197, 'name': Rename_Task, 'duration_secs': 0.267065} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 812.344755] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 812.345508] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e62a5159-7efa-437d-8907-ccdc0bd568dd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.354735] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 812.354735] env[68217]: value = "task-2961201" [ 812.354735] env[68217]: _type = "Task" [ 812.354735] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.369661] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961201, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.379610] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Volume attach. Driver type: vmdk {{(pid=68217) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 812.379872] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594263', 'volume_id': 'd5de6532-0a92-4a7a-b66b-09e2c100b998', 'name': 'volume-d5de6532-0a92-4a7a-b66b-09e2c100b998', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7a01c2c5-3108-4382-85c5-a5ea5e6e160c', 'attached_at': '', 'detached_at': '', 'volume_id': 'd5de6532-0a92-4a7a-b66b-09e2c100b998', 'serial': 'd5de6532-0a92-4a7a-b66b-09e2c100b998'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 812.380764] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c920e79c-b618-4cf6-8cfd-1faaa53ed674 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.402529] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-970e2f81-8483-4496-900f-5bbdf71466bc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.437495] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Reconfiguring VM instance instance-00000036 to attach disk [datastore1] volume-d5de6532-0a92-4a7a-b66b-09e2c100b998/volume-d5de6532-0a92-4a7a-b66b-09e2c100b998.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 812.437495] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ff5706a5-2e5a-4ae7-964b-28fd9c9536ec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.462245] env[68217]: DEBUG oslo_vmware.api [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 812.462245] env[68217]: value = "task-2961202" [ 812.462245] env[68217]: _type = "Task" [ 812.462245] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.474346] env[68217]: DEBUG oslo_vmware.api [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961202, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.557560] env[68217]: DEBUG oslo_concurrency.lockutils [None req-048a50a4-c365-4812-bf23-a8ba660973a1 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Lock "e642c93b-ca48-4d23-9abb-ff243855d8d0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.779s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 812.688217] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961200, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.768749] env[68217]: DEBUG nova.virt.hardware [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 812.769047] env[68217]: DEBUG nova.virt.hardware [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 812.769211] env[68217]: DEBUG nova.virt.hardware [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 812.769516] env[68217]: DEBUG nova.virt.hardware [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 812.769671] env[68217]: DEBUG nova.virt.hardware [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 812.769817] env[68217]: DEBUG nova.virt.hardware [None 
req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 812.770040] env[68217]: DEBUG nova.virt.hardware [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 812.770254] env[68217]: DEBUG nova.virt.hardware [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 812.770427] env[68217]: DEBUG nova.virt.hardware [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 812.770589] env[68217]: DEBUG nova.virt.hardware [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 812.770788] env[68217]: DEBUG nova.virt.hardware [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 812.773729] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e0b757b-b6e0-40cc-a2f4-286087afb9d5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.785374] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfb87f08-6803-4e5d-ac5a-02acb8a97f08 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.804435] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7d:95:c1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b1f3e6c3-5584-4852-9017-476ab8ac4946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b029c255-6f3f-41b3-ba5c-16ca2a968c6e', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 812.811964] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for function 
nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 812.814936] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 812.815733] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a6f588e2-b856-4bb7-826c-1e5105f18113 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.840429] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 812.840429] env[68217]: value = "task-2961203" [ 812.840429] env[68217]: _type = "Task" [ 812.840429] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 812.851736] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961203, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.869435] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961201, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 812.923220] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b01daab7-8eee-4d77-b519-b412c2ee3ef2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.937835] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f6443da-ec93-43ed-b576-77a50923bf3e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.983212] env[68217]: DEBUG nova.network.neutron [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Successfully updated port: cdcfcb29-1a21-4c5f-a612-ab70b34a44b1 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 812.991418] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02cf5cb6-b0ff-4541-9727-fd75381e92f8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.009555] env[68217]: DEBUG oslo_vmware.api [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961202, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.011351] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497f6583-fdb5-4b50-9bb0-e5af4bfd8d16 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.032805] env[68217]: DEBUG nova.compute.provider_tree [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 813.188520] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961200, 'name': CreateVM_Task, 'duration_secs': 0.529727} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.188779] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 813.189533] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.189749] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 813.190117] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 813.190456] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-764c5c1b-b7c0-466b-91ca-dca0756e1f41 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.197324] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 813.197324] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]523b070c-de91-a942-0c6c-54db19af1b47" [ 813.197324] env[68217]: _type = "Task" [ 813.197324] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.208911] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523b070c-de91-a942-0c6c-54db19af1b47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.346667] env[68217]: DEBUG nova.network.neutron [req-2739e488-4a01-48fb-bb48-0346a8ffc582 req-a49dc639-0826-45b5-b2e0-4475cdd5cb2c service nova] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Updated VIF entry in instance network info cache for port 96bf6c1f-33b5-4589-b488-c5be8d5892c6. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 813.347210] env[68217]: DEBUG nova.network.neutron [req-2739e488-4a01-48fb-bb48-0346a8ffc582 req-a49dc639-0826-45b5-b2e0-4475cdd5cb2c service nova] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Updating instance_info_cache with network_info: [{"id": "96bf6c1f-33b5-4589-b488-c5be8d5892c6", "address": "fa:16:3e:b6:b7:18", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.94", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96bf6c1f-33", "ovs_interfaceid": "96bf6c1f-33b5-4589-b488-c5be8d5892c6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.355153] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961203, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.370793] env[68217]: DEBUG oslo_vmware.api [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961201, 'name': PowerOnVM_Task, 'duration_secs': 0.674085} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.370793] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 813.370993] env[68217]: INFO nova.compute.manager [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Took 9.13 seconds to spawn the instance on the hypervisor. [ 813.371191] env[68217]: DEBUG nova.compute.manager [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 813.372246] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5752b18e-2f14-4887-868d-d6a26b27983c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.494269] env[68217]: DEBUG oslo_vmware.api [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961202, 'name': ReconfigVM_Task, 'duration_secs': 0.774157} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.494692] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Reconfigured VM instance instance-00000036 to attach disk [datastore1] volume-d5de6532-0a92-4a7a-b66b-09e2c100b998/volume-d5de6532-0a92-4a7a-b66b-09e2c100b998.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 813.503879] env[68217]: DEBUG oslo_concurrency.lockutils [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquiring lock "refresh_cache-b7fe971e-353f-427c-896c-32f9de0d70e7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.503879] env[68217]: DEBUG oslo_concurrency.lockutils [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquired lock "refresh_cache-b7fe971e-353f-427c-896c-32f9de0d70e7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 813.503879] env[68217]: DEBUG nova.network.neutron [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 813.504927] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task 
with opID=oslo.vmware-3f482387-4024-4975-9fc0-7bc0cc0ff2a2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.531023] env[68217]: DEBUG oslo_vmware.api [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 813.531023] env[68217]: value = "task-2961204" [ 813.531023] env[68217]: _type = "Task" [ 813.531023] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.537344] env[68217]: DEBUG nova.scheduler.client.report [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 813.548444] env[68217]: DEBUG oslo_vmware.api [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961204, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.664197] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Acquiring lock "e642c93b-ca48-4d23-9abb-ff243855d8d0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.664197] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Lock "e642c93b-ca48-4d23-9abb-ff243855d8d0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.664197] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Acquiring lock "e642c93b-ca48-4d23-9abb-ff243855d8d0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 813.664371] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Lock "e642c93b-ca48-4d23-9abb-ff243855d8d0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 813.664524] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Lock "e642c93b-ca48-4d23-9abb-ff243855d8d0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 813.666832] env[68217]: INFO nova.compute.manager [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Terminating instance [ 813.714202] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523b070c-de91-a942-0c6c-54db19af1b47, 'name': SearchDatastore_Task, 'duration_secs': 0.01338} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 813.714202] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 813.714202] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 813.714202] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 813.714464] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 813.714464] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 813.714464] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87a47caa-cb74-484a-af26-0cbc249bae41 {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.724104] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 813.724302] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 813.725042] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-817662bd-8a27-4907-8a4e-980dc152ec14 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 813.731078] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 813.731078] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]521a5ab5-e091-6c63-4d48-7f23cc644967" [ 813.731078] env[68217]: _type = "Task" [ 813.731078] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 813.739500] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521a5ab5-e091-6c63-4d48-7f23cc644967, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.850588] env[68217]: DEBUG oslo_concurrency.lockutils [req-2739e488-4a01-48fb-bb48-0346a8ffc582 req-a49dc639-0826-45b5-b2e0-4475cdd5cb2c service nova] Releasing lock "refresh_cache-23366029-e754-49dc-ba56-7a0d92232d81" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 813.854299] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961203, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 813.895070] env[68217]: INFO nova.compute.manager [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Took 35.31 seconds to build instance. 
[ 814.043168] env[68217]: DEBUG oslo_concurrency.lockutils [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.612s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.043793] env[68217]: DEBUG nova.compute.manager [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 814.046597] env[68217]: DEBUG oslo_vmware.api [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961204, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.046906] env[68217]: DEBUG oslo_concurrency.lockutils [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.948s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.048438] env[68217]: INFO nova.compute.claims [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 814.083607] env[68217]: DEBUG nova.network.neutron [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 814.170681] env[68217]: DEBUG nova.compute.manager [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 814.170925] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 814.171831] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d740b21-a637-4efc-a3b6-6aab06e124b7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.181266] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 814.181525] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3b2b7e48-9587-402b-bae6-7c36c9c9409c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.189280] env[68217]: DEBUG oslo_vmware.api [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Waiting for the task: (returnval){ [ 814.189280] env[68217]: value = "task-2961205" [ 814.189280] env[68217]: _type = "Task" [ 814.189280] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.198720] env[68217]: DEBUG oslo_vmware.api [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': task-2961205, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.243253] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521a5ab5-e091-6c63-4d48-7f23cc644967, 'name': SearchDatastore_Task, 'duration_secs': 0.011001} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.244124] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-66f65dd9-9e39-47fa-8df9-98ccc4f6c002 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.250317] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 814.250317] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5244d405-87a5-a561-3fb8-66be75384368" [ 814.250317] env[68217]: _type = "Task" [ 814.250317] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.269088] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5244d405-87a5-a561-3fb8-66be75384368, 'name': SearchDatastore_Task} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.269088] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 814.269088] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 23366029-e754-49dc-ba56-7a0d92232d81/23366029-e754-49dc-ba56-7a0d92232d81.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 814.269088] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d49e04b4-30ca-4fd8-95c6-8d0349d9f85c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.277322] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 814.277322] env[68217]: value = "task-2961206" [ 814.277322] env[68217]: _type = "Task" [ 814.277322] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.287742] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961206, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.304104] env[68217]: DEBUG nova.compute.manager [req-ec26b4b6-fb55-4c47-9b7a-019f5dd6c251 req-e9b8c0c4-90c7-4bbd-9294-d536db77d621 service nova] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Received event network-vif-plugged-cdcfcb29-1a21-4c5f-a612-ab70b34a44b1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 814.304418] env[68217]: DEBUG oslo_concurrency.lockutils [req-ec26b4b6-fb55-4c47-9b7a-019f5dd6c251 req-e9b8c0c4-90c7-4bbd-9294-d536db77d621 service nova] Acquiring lock "b7fe971e-353f-427c-896c-32f9de0d70e7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 814.304605] env[68217]: DEBUG oslo_concurrency.lockutils [req-ec26b4b6-fb55-4c47-9b7a-019f5dd6c251 req-e9b8c0c4-90c7-4bbd-9294-d536db77d621 service nova] Lock "b7fe971e-353f-427c-896c-32f9de0d70e7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 814.304771] env[68217]: DEBUG oslo_concurrency.lockutils [req-ec26b4b6-fb55-4c47-9b7a-019f5dd6c251 req-e9b8c0c4-90c7-4bbd-9294-d536db77d621 service nova] Lock "b7fe971e-353f-427c-896c-32f9de0d70e7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.305493] env[68217]: DEBUG nova.compute.manager [req-ec26b4b6-fb55-4c47-9b7a-019f5dd6c251 req-e9b8c0c4-90c7-4bbd-9294-d536db77d621 service nova] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] No waiting events found dispatching network-vif-plugged-cdcfcb29-1a21-4c5f-a612-ab70b34a44b1 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 814.305493] env[68217]: WARNING nova.compute.manager [req-ec26b4b6-fb55-4c47-9b7a-019f5dd6c251 req-e9b8c0c4-90c7-4bbd-9294-d536db77d621 service nova] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Received unexpected event network-vif-plugged-cdcfcb29-1a21-4c5f-a612-ab70b34a44b1 for instance with vm_state building and task_state spawning. [ 814.305803] env[68217]: DEBUG nova.compute.manager [req-ec26b4b6-fb55-4c47-9b7a-019f5dd6c251 req-e9b8c0c4-90c7-4bbd-9294-d536db77d621 service nova] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Received event network-changed-cdcfcb29-1a21-4c5f-a612-ab70b34a44b1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 814.305803] env[68217]: DEBUG nova.compute.manager [req-ec26b4b6-fb55-4c47-9b7a-019f5dd6c251 req-e9b8c0c4-90c7-4bbd-9294-d536db77d621 service nova] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Refreshing instance network info cache due to event network-changed-cdcfcb29-1a21-4c5f-a612-ab70b34a44b1. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 814.305895] env[68217]: DEBUG oslo_concurrency.lockutils [req-ec26b4b6-fb55-4c47-9b7a-019f5dd6c251 req-e9b8c0c4-90c7-4bbd-9294-d536db77d621 service nova] Acquiring lock "refresh_cache-b7fe971e-353f-427c-896c-32f9de0d70e7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.356318] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961203, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.370738] env[68217]: DEBUG nova.network.neutron [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Updating instance_info_cache with network_info: [{"id": "cdcfcb29-1a21-4c5f-a612-ab70b34a44b1", "address": "fa:16:3e:42:19:77", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.235", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdcfcb29-1a", "ovs_interfaceid": "cdcfcb29-1a21-4c5f-a612-ab70b34a44b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 814.397493] env[68217]: DEBUG oslo_concurrency.lockutils [None req-84ac01fb-fa77-4ca6-90c6-3ec762199140 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "a86015ea-fa6b-4cf8-9d79-273ffa02ec23" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.391s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 814.544787] env[68217]: DEBUG oslo_vmware.api [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961204, 'name': ReconfigVM_Task, 'duration_secs': 0.988524} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.546804] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594263', 'volume_id': 'd5de6532-0a92-4a7a-b66b-09e2c100b998', 'name': 'volume-d5de6532-0a92-4a7a-b66b-09e2c100b998', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7a01c2c5-3108-4382-85c5-a5ea5e6e160c', 'attached_at': '', 'detached_at': '', 'volume_id': 'd5de6532-0a92-4a7a-b66b-09e2c100b998', 'serial': 'd5de6532-0a92-4a7a-b66b-09e2c100b998'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 814.560025] env[68217]: DEBUG nova.compute.utils [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 814.560025] env[68217]: DEBUG nova.compute.manager [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 814.560025] env[68217]: DEBUG nova.network.neutron [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 814.625491] env[68217]: DEBUG nova.policy [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4d78489bbf4a4eeebc7922a4656aca26', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a6348b1f20794ee9a016d409eee576e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 814.705024] env[68217]: DEBUG oslo_vmware.api [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': task-2961205, 'name': PowerOffVM_Task, 'duration_secs': 0.367051} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.705024] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 814.705024] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 814.705024] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7dc347dd-b91e-4210-90d7-1056dfeab41c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.795538] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 814.795538] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 814.795538] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Deleting the datastore file [datastore1] e642c93b-ca48-4d23-9abb-ff243855d8d0 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 814.799829] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c85f6ff6-1278-44a3-ba79-2bf487168732 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.802281] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961206, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.807549] env[68217]: DEBUG oslo_vmware.api [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Waiting for the task: (returnval){ [ 814.807549] env[68217]: value = "task-2961208" [ 814.807549] env[68217]: _type = "Task" [ 814.807549] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.817064] env[68217]: DEBUG oslo_vmware.api [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': task-2961208, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.856196] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961203, 'name': CreateVM_Task, 'duration_secs': 1.566966} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 814.856196] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 814.856658] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 814.856835] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.857205] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 814.857491] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-90ec0b29-12e6-45ab-8542-a36ab2f21e85 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.865021] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 814.865021] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5200edf6-2665-81dd-7b54-25f81145bc53" [ 814.865021] env[68217]: _type = "Task" [ 814.865021] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.874572] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5200edf6-2665-81dd-7b54-25f81145bc53, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 814.875097] env[68217]: DEBUG oslo_concurrency.lockutils [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Releasing lock "refresh_cache-b7fe971e-353f-427c-896c-32f9de0d70e7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 814.875417] env[68217]: DEBUG nova.compute.manager [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Instance network_info: |[{"id": "cdcfcb29-1a21-4c5f-a612-ab70b34a44b1", "address": "fa:16:3e:42:19:77", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.235", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdcfcb29-1a", "ovs_interfaceid": "cdcfcb29-1a21-4c5f-a612-ab70b34a44b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 814.875711] env[68217]: DEBUG oslo_concurrency.lockutils [req-ec26b4b6-fb55-4c47-9b7a-019f5dd6c251 req-e9b8c0c4-90c7-4bbd-9294-d536db77d621 service nova] Acquired lock "refresh_cache-b7fe971e-353f-427c-896c-32f9de0d70e7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 814.875883] env[68217]: DEBUG nova.network.neutron [req-ec26b4b6-fb55-4c47-9b7a-019f5dd6c251 req-e9b8c0c4-90c7-4bbd-9294-d536db77d621 service nova] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Refreshing network info cache for port cdcfcb29-1a21-4c5f-a612-ab70b34a44b1 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 814.877234] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:19:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'cdcfcb29-1a21-4c5f-a612-ab70b34a44b1', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 814.885958] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 
tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 814.885958] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 814.886077] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9268842-5976-4a7e-84bb-cf4d6967ee8b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.912985] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 814.912985] env[68217]: value = "task-2961209" [ 814.912985] env[68217]: _type = "Task" [ 814.912985] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 814.927454] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961209, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.065219] env[68217]: DEBUG nova.compute.manager [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 815.228891] env[68217]: DEBUG nova.network.neutron [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Successfully created port: 6f6f347d-9ee4-4b18-9c77-67f67ee873e6 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 815.295067] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961206, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.552559} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.295349] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 23366029-e754-49dc-ba56-7a0d92232d81/23366029-e754-49dc-ba56-7a0d92232d81.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 815.295569] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 815.295886] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-456dacc0-a900-4085-8cf5-83f49b2332a6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.313318] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 815.313318] env[68217]: value = "task-2961210" [ 815.313318] env[68217]: _type = "Task" [ 815.313318] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.324549] env[68217]: DEBUG oslo_vmware.api [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Task: {'id': task-2961208, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160279} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.325951] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 815.326117] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 815.326328] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 815.326531] env[68217]: INFO nova.compute.manager [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 815.326894] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 815.327148] env[68217]: DEBUG nova.compute.manager [-] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 815.327283] env[68217]: DEBUG nova.network.neutron [-] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 815.334061] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961210, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.365166] env[68217]: DEBUG nova.compute.manager [req-25a36e7f-d171-41ef-b507-71f888b85a77 req-77678bd1-7d4f-4929-abab-536a49dd2725 service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Received event network-changed-21f37b3b-0b0a-412e-8413-f3a1967f5c79 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 815.365497] env[68217]: DEBUG nova.compute.manager [req-25a36e7f-d171-41ef-b507-71f888b85a77 req-77678bd1-7d4f-4929-abab-536a49dd2725 service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Refreshing instance network info cache due to event network-changed-21f37b3b-0b0a-412e-8413-f3a1967f5c79. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 815.365719] env[68217]: DEBUG oslo_concurrency.lockutils [req-25a36e7f-d171-41ef-b507-71f888b85a77 req-77678bd1-7d4f-4929-abab-536a49dd2725 service nova] Acquiring lock "refresh_cache-a86015ea-fa6b-4cf8-9d79-273ffa02ec23" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.365859] env[68217]: DEBUG oslo_concurrency.lockutils [req-25a36e7f-d171-41ef-b507-71f888b85a77 req-77678bd1-7d4f-4929-abab-536a49dd2725 service nova] Acquired lock "refresh_cache-a86015ea-fa6b-4cf8-9d79-273ffa02ec23" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 815.366811] env[68217]: DEBUG nova.network.neutron [req-25a36e7f-d171-41ef-b507-71f888b85a77 req-77678bd1-7d4f-4929-abab-536a49dd2725 service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Refreshing network info cache for port 21f37b3b-0b0a-412e-8413-f3a1967f5c79 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 815.383069] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5200edf6-2665-81dd-7b54-25f81145bc53, 'name': SearchDatastore_Task, 'duration_secs': 0.010558} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.383435] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.383674] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 815.383907] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.384117] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 815.384236] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 815.384515] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3fb288b9-7934-4b58-b1d2-34b823d1d571 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.404967] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 815.405167] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 815.408580] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e9f66b36-8d93-4690-91c3-90a53263a7ed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.420682] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 815.420682] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524dcb8a-a19e-7b9c-c771-7f4f657f6947" [ 815.420682] env[68217]: _type = "Task" [ 815.420682] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.430047] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961209, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.437719] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524dcb8a-a19e-7b9c-c771-7f4f657f6947, 'name': SearchDatastore_Task, 'duration_secs': 0.011354} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.438804] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-233cc762-82d0-426f-9f53-41892712be70 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.446628] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 815.446628] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5281b2f3-49c7-3d1b-e36a-450825ae6716" [ 815.446628] env[68217]: _type = "Task" [ 815.446628] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.459800] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5281b2f3-49c7-3d1b-e36a-450825ae6716, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.488786] env[68217]: DEBUG oslo_concurrency.lockutils [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquiring lock "b5e15801-301a-4ee6-87d2-bbf749967631" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.489073] env[68217]: DEBUG oslo_concurrency.lockutils [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "b5e15801-301a-4ee6-87d2-bbf749967631" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 815.489242] env[68217]: DEBUG oslo_concurrency.lockutils [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquiring lock "b5e15801-301a-4ee6-87d2-bbf749967631-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 815.489417] env[68217]: DEBUG oslo_concurrency.lockutils [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "b5e15801-301a-4ee6-87d2-bbf749967631-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 815.489573] env[68217]: DEBUG oslo_concurrency.lockutils [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "b5e15801-301a-4ee6-87d2-bbf749967631-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 815.491584] env[68217]: INFO nova.compute.manager [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Terminating instance [ 815.583702] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a19ea2-c5c3-4447-be83-aae57f68e7f6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.595132] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a40918df-9d4d-4d9e-b182-06a7a9c8a431 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.631022] env[68217]: DEBUG nova.objects.instance [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lazy-loading 'flavor' on Instance uuid 7a01c2c5-3108-4382-85c5-a5ea5e6e160c {{(pid=68217) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 815.636055] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ffc1c7-d7a0-4138-8605-9033bf788ebf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.648135] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6c5ae3-8153-47fb-a7fc-eeb0a740ab97 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.665078] env[68217]: DEBUG nova.compute.provider_tree [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.824900] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961210, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.091889} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.825405] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 815.826323] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de3383f-8c58-49bb-9feb-e9601bee912f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.850681] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Reconfiguring VM instance instance-0000003a to attach disk [datastore1] 23366029-e754-49dc-ba56-7a0d92232d81/23366029-e754-49dc-ba56-7a0d92232d81.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 815.851024] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46f3547f-6ead-4488-a84d-21638c6b3cce {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.874895] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 815.874895] env[68217]: value = "task-2961211" [ 815.874895] env[68217]: _type = "Task" [ 815.874895] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.885870] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961211, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.924670] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961209, 'name': CreateVM_Task, 'duration_secs': 0.554964} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.924893] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 815.925717] env[68217]: DEBUG oslo_concurrency.lockutils [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.925909] env[68217]: DEBUG oslo_concurrency.lockutils [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 815.926310] env[68217]: DEBUG oslo_concurrency.lockutils [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 815.926626] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fc69640-26d3-4920-b604-59b5e7043ef7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.933109] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 815.933109] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52133838-bd2b-f7b7-f024-10a043c09be1" [ 815.933109] env[68217]: _type = "Task" [ 815.933109] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.943563] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52133838-bd2b-f7b7-f024-10a043c09be1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.960613] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5281b2f3-49c7-3d1b-e36a-450825ae6716, 'name': SearchDatastore_Task, 'duration_secs': 0.01295} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 815.960890] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 815.961166] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] ba39e563-3e3a-40aa-815f-760f0f37a55d/ba39e563-3e3a-40aa-815f-760f0f37a55d.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 815.961439] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b1778c4d-3b90-40df-ac84-d81565f6c4ed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.971086] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 815.971086] env[68217]: value = "task-2961212" [ 815.971086] env[68217]: _type = "Task" [ 815.971086] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 815.978494] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961212, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 815.998053] env[68217]: DEBUG nova.compute.manager [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 815.998053] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 815.998053] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51974a62-c4f5-4e3a-a9a6-8f45b1733d47 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.007254] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 816.007617] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17961e1d-9988-49e7-b9ff-28ba497a22ad {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.018942] env[68217]: DEBUG oslo_vmware.api [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 816.018942] env[68217]: value = "task-2961213" [ 816.018942] env[68217]: _type = "Task" [ 816.018942] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.029772] env[68217]: DEBUG oslo_vmware.api [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961213, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.083026] env[68217]: DEBUG nova.compute.manager [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 816.122959] env[68217]: DEBUG nova.virt.hardware [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 816.123148] env[68217]: DEBUG nova.virt.hardware [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 816.123328] env[68217]: DEBUG nova.virt.hardware [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 816.123518] env[68217]: DEBUG nova.virt.hardware [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 816.123658] env[68217]: DEBUG nova.virt.hardware [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 816.124283] env[68217]: DEBUG nova.virt.hardware [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 816.124283] env[68217]: DEBUG nova.virt.hardware [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 816.125415] env[68217]: DEBUG nova.virt.hardware [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 816.125586] env[68217]: DEBUG nova.virt.hardware [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 816.125765] env[68217]: DEBUG nova.virt.hardware [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 816.125942] env[68217]: DEBUG nova.virt.hardware [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 816.126838] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86065bde-4cb2-40be-b19b-f534b881d63d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.138368] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82cb5ba0-9bb0-4d4a-a874-82290273124a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.147661] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d350dd33-eaa1-4400-aebf-051cb2b10cdc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "7a01c2c5-3108-4382-85c5-a5ea5e6e160c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.414s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.170500] env[68217]: DEBUG nova.scheduler.client.report [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 816.247524] env[68217]: DEBUG nova.network.neutron [req-ec26b4b6-fb55-4c47-9b7a-019f5dd6c251 req-e9b8c0c4-90c7-4bbd-9294-d536db77d621 service nova] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Updated VIF entry in instance network info cache for port cdcfcb29-1a21-4c5f-a612-ab70b34a44b1. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 816.247931] env[68217]: DEBUG nova.network.neutron [req-ec26b4b6-fb55-4c47-9b7a-019f5dd6c251 req-e9b8c0c4-90c7-4bbd-9294-d536db77d621 service nova] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Updating instance_info_cache with network_info: [{"id": "cdcfcb29-1a21-4c5f-a612-ab70b34a44b1", "address": "fa:16:3e:42:19:77", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.235", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapcdcfcb29-1a", "ovs_interfaceid": "cdcfcb29-1a21-4c5f-a612-ab70b34a44b1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.392682] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961211, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.447838] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52133838-bd2b-f7b7-f024-10a043c09be1, 'name': SearchDatastore_Task, 'duration_secs': 0.01119} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.448754] env[68217]: DEBUG oslo_concurrency.lockutils [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 816.449163] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 816.449526] env[68217]: DEBUG oslo_concurrency.lockutils [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.449707] env[68217]: DEBUG oslo_concurrency.lockutils [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 816.449970] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 816.450375] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8aa8b54a-76e8-4944-bec6-8257056c4da1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.469317] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 816.469710] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 816.472261] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f06ce5c-d0c2-4eea-8000-c17665de461e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.482074] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 816.482074] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52691d2b-e999-fb41-4a57-4f977d76f5c0" [ 816.482074] env[68217]: _type = "Task" [ 816.482074] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.485808] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961212, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.494748] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52691d2b-e999-fb41-4a57-4f977d76f5c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.497281] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "7a01c2c5-3108-4382-85c5-a5ea5e6e160c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.497594] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "7a01c2c5-3108-4382-85c5-a5ea5e6e160c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.497810] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "7a01c2c5-3108-4382-85c5-a5ea5e6e160c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.497998] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "7a01c2c5-3108-4382-85c5-a5ea5e6e160c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
816.498188] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "7a01c2c5-3108-4382-85c5-a5ea5e6e160c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.500426] env[68217]: INFO nova.compute.manager [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Terminating instance [ 816.530125] env[68217]: DEBUG oslo_vmware.api [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961213, 'name': PowerOffVM_Task, 'duration_secs': 0.325266} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.530438] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 816.530605] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 816.531151] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5077902-c00e-486b-9c00-7620ae307565 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.567500] env[68217]: DEBUG nova.network.neutron [req-25a36e7f-d171-41ef-b507-71f888b85a77 req-77678bd1-7d4f-4929-abab-536a49dd2725 service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Updated VIF entry in instance network info cache for port 21f37b3b-0b0a-412e-8413-f3a1967f5c79. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 816.567904] env[68217]: DEBUG nova.network.neutron [req-25a36e7f-d171-41ef-b507-71f888b85a77 req-77678bd1-7d4f-4929-abab-536a49dd2725 service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Updating instance_info_cache with network_info: [{"id": "21f37b3b-0b0a-412e-8413-f3a1967f5c79", "address": "fa:16:3e:d4:23:d4", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21f37b3b-0b", "ovs_interfaceid": "21f37b3b-0b0a-412e-8413-f3a1967f5c79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.606269] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 816.606538] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 816.606719] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Deleting the datastore file [datastore2] b5e15801-301a-4ee6-87d2-bbf749967631 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 816.607242] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a99713c1-b14e-4f0f-88c0-39d19bba5b72 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.617248] env[68217]: DEBUG oslo_vmware.api [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for the task: (returnval){ [ 816.617248] env[68217]: value = "task-2961215" [ 816.617248] env[68217]: _type = "Task" [ 816.617248] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.627145] env[68217]: DEBUG oslo_vmware.api [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961215, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.637408] env[68217]: DEBUG nova.compute.manager [req-62631c9a-3091-4270-b278-39cc8338c6bd req-f0bcfe6c-3d8a-4e49-a141-877b4e320f69 service nova] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Received event network-vif-deleted-1301548f-a001-481f-8e30-4f1a5721d9a9 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 816.637408] env[68217]: INFO nova.compute.manager [req-62631c9a-3091-4270-b278-39cc8338c6bd req-f0bcfe6c-3d8a-4e49-a141-877b4e320f69 service nova] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Neutron deleted interface 1301548f-a001-481f-8e30-4f1a5721d9a9; detaching it from the instance and deleting it from the info cache [ 816.637408] env[68217]: DEBUG nova.network.neutron [req-62631c9a-3091-4270-b278-39cc8338c6bd req-f0bcfe6c-3d8a-4e49-a141-877b4e320f69 service nova] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.678243] env[68217]: DEBUG oslo_concurrency.lockutils [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.631s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.678781] env[68217]: DEBUG nova.compute.manager [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 816.681418] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 17.879s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.681614] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 816.685243] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68217) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 816.685563] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.459s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 816.687076] env[68217]: INFO nova.compute.claims [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 816.695017] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1172896-7565-4164-b1ec-b6789f7ca175 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.702934] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dccd2f0-14df-4289-ad60-703aa4d5c2b0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.722299] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3309c695-ee20-4589-805c-869a83dd5b12 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.732613] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdbb0b98-0ed8-458a-808d-e79f97fe2dd5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.768744] env[68217]: DEBUG oslo_concurrency.lockutils [req-ec26b4b6-fb55-4c47-9b7a-019f5dd6c251 req-e9b8c0c4-90c7-4bbd-9294-d536db77d621 service nova] Releasing lock "refresh_cache-b7fe971e-353f-427c-896c-32f9de0d70e7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 816.769272] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179144MB free_disk=153GB free_vcpus=48 pci_devices=None {{(pid=68217) 
_report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 816.769449] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 816.770969] env[68217]: DEBUG nova.network.neutron [-] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.886014] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961211, 'name': ReconfigVM_Task, 'duration_secs': 0.522631} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.886677] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Reconfigured VM instance instance-0000003a to attach disk [datastore1] 23366029-e754-49dc-ba56-7a0d92232d81/23366029-e754-49dc-ba56-7a0d92232d81.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 816.886965] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-953f4ad1-f962-4c1c-b409-296d20caa450 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.893990] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 816.893990] env[68217]: value = "task-2961216" [ 816.893990] env[68217]: _type = "Task" [ 816.893990] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.903327] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961216, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 816.982391] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961212, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.521354} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.982713] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] ba39e563-3e3a-40aa-815f-760f0f37a55d/ba39e563-3e3a-40aa-815f-760f0f37a55d.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 816.982926] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 816.983271] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0d83594b-ae45-47bc-a9b4-12001909bfeb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.991852] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 816.991852] env[68217]: value = "task-2961217" [ 816.991852] env[68217]: _type = "Task" [ 816.991852] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 816.995518] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52691d2b-e999-fb41-4a57-4f977d76f5c0, 'name': SearchDatastore_Task, 'duration_secs': 0.012413} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 816.999240] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1d2135e8-6b89-4286-9d3f-366404d3add8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.005792] env[68217]: DEBUG nova.compute.manager [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 817.006046] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 817.006417] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 817.006417] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]526818c9-d1e0-5338-9f92-0f1065f88fbe" [ 817.006417] env[68217]: _type = "Task" [ 817.006417] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.009283] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4264b638-9c12-4aa3-bbcc-1dfabc5bca96 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.010988] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961217, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.020294] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526818c9-d1e0-5338-9f92-0f1065f88fbe, 'name': SearchDatastore_Task, 'duration_secs': 0.010356} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.021659] env[68217]: DEBUG oslo_concurrency.lockutils [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 817.022051] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] b7fe971e-353f-427c-896c-32f9de0d70e7/b7fe971e-353f-427c-896c-32f9de0d70e7.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 817.022312] env[68217]: DEBUG oslo_vmware.api [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 817.022312] env[68217]: value = "task-2961218" [ 817.022312] env[68217]: _type = "Task" [ 817.022312] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.022511] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a0020005-5540-4c08-b896-8cbb5863e53d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.034038] env[68217]: DEBUG oslo_vmware.api [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961218, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.035873] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 817.035873] env[68217]: value = "task-2961219" [ 817.035873] env[68217]: _type = "Task" [ 817.035873] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.046856] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961219, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.070831] env[68217]: DEBUG oslo_concurrency.lockutils [req-25a36e7f-d171-41ef-b507-71f888b85a77 req-77678bd1-7d4f-4929-abab-536a49dd2725 service nova] Releasing lock "refresh_cache-a86015ea-fa6b-4cf8-9d79-273ffa02ec23" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 817.130762] env[68217]: DEBUG oslo_vmware.api [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Task: {'id': task-2961215, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176559} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.130912] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 817.131109] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 817.131365] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 817.131658] env[68217]: INFO nova.compute.manager [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Took 1.14 seconds to destroy the instance on the hypervisor. [ 817.131922] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 817.132171] env[68217]: DEBUG nova.compute.manager [-] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 817.132536] env[68217]: DEBUG nova.network.neutron [-] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 817.139624] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-357fb3c6-a945-4cc2-87c3-c0d89d3b7c19 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.152598] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c5544a7-ee8b-47fc-9867-0f05ce2038d0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.197102] env[68217]: DEBUG nova.compute.utils [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 817.198418] env[68217]: DEBUG nova.compute.manager [req-62631c9a-3091-4270-b278-39cc8338c6bd req-f0bcfe6c-3d8a-4e49-a141-877b4e320f69 service nova] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Detach interface failed, port_id=1301548f-a001-481f-8e30-4f1a5721d9a9, reason: Instance e642c93b-ca48-4d23-9abb-ff243855d8d0 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 817.199255] env[68217]: DEBUG nova.compute.manager [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 817.199426] env[68217]: DEBUG nova.network.neutron [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 817.275893] env[68217]: INFO nova.compute.manager [-] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Took 1.95 seconds to deallocate network for instance. 
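The records above repeatedly show the same two mechanisms: compute threads serialising work with oslo.concurrency locks (the Acquiring/Acquired/Releasing lock lines) and driver calls that start a vCenter task and then poll it to completion (the "Waiting for the task ... to complete" and "_poll_task ... progress is N%" lines, e.g. PowerOffVM_Task and DeleteDatastoreFile_Task during the destroy of instance b5e15801-301a-4ee6-87d2-bbf749967631). The snippet below is a minimal, hedged sketch of that pattern using the public oslo.vmware and oslo.concurrency APIs, not the Nova driver code itself; the connection parameters and the managed object ID are placeholders, not values taken from this log.

    # Hypothetical sketch of the lock + task-wait pattern visible in the log.
    # Assumes a reachable vCenter; VC_HOST/VC_USER/VC_PASS and the moref value
    # 'vm-12345' are placeholders, not values from this environment.
    from oslo_concurrency import lockutils
    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    session = vmware_api.VMwareAPISession(
        'VC_HOST', 'VC_USER', 'VC_PASS',
        api_retry_count=10,        # retries on transient API faults
        task_poll_interval=0.5)    # seconds between task polls

    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

    # Serialise operations on this VM, mirroring the lockutils lines above.
    with lockutils.lock('vm-12345', external=False):
        # invoke_api() issues PowerOffVM_Task and returns a task moref;
        # wait_for_task() polls it (the "progress is N%" records) and raises
        # if the task finishes in an error state.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        session.wait_for_task(task)

The destroy sequence logged above is the same idea applied several times in a row: power off the VM, unregister it, then issue FileManager.DeleteDatastoreFile_Task for its datastore directory, waiting on each task before moving to the next step.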
[ 817.330205] env[68217]: DEBUG nova.policy [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8fcfd9e5288b4ee2b012a0a2cf242d7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3bf05c5ad8574e0f858cd2261af9ef24', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 817.426244] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961216, 'name': Rename_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.510508] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961217, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.082607} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.510987] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 817.512534] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2f8236d-67be-42ba-967a-9319bf572875 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.546038] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Reconfiguring VM instance instance-00000037 to attach disk [datastore1] ba39e563-3e3a-40aa-815f-760f0f37a55d/ba39e563-3e3a-40aa-815f-760f0f37a55d.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 817.550593] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d21cd5e5-63c2-4dcd-9928-ddf49d1f7fcc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.576081] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961219, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530624} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.580318] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] b7fe971e-353f-427c-896c-32f9de0d70e7/b7fe971e-353f-427c-896c-32f9de0d70e7.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 817.581052] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 817.581396] env[68217]: DEBUG oslo_vmware.api [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961218, 'name': PowerOffVM_Task, 'duration_secs': 0.316655} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.583455] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d21254d3-731c-4b2c-afcd-934cfe100374 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.585826] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 817.586079] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Volume detach. 
Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 817.586357] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594263', 'volume_id': 'd5de6532-0a92-4a7a-b66b-09e2c100b998', 'name': 'volume-d5de6532-0a92-4a7a-b66b-09e2c100b998', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7a01c2c5-3108-4382-85c5-a5ea5e6e160c', 'attached_at': '', 'detached_at': '', 'volume_id': 'd5de6532-0a92-4a7a-b66b-09e2c100b998', 'serial': 'd5de6532-0a92-4a7a-b66b-09e2c100b998'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 817.586764] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 817.586764] env[68217]: value = "task-2961220" [ 817.586764] env[68217]: _type = "Task" [ 817.586764] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.587602] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cacc80af-1d6f-423b-ae77-9caa4ade4b1c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.596987] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 817.596987] env[68217]: value = "task-2961221" [ 817.596987] env[68217]: _type = "Task" [ 817.596987] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.619930] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961220, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.625502] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee12207-11c9-4302-9e98-45c7de9ddd86 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.633398] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961221, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.635846] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec8c4903-ff8b-44ff-8ae0-305e068ca657 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.657337] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47e78ca-642a-4dd2-b148-36e2f4a4bbfe {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.676015] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] The volume has not been displaced from its original location: [datastore1] volume-d5de6532-0a92-4a7a-b66b-09e2c100b998/volume-d5de6532-0a92-4a7a-b66b-09e2c100b998.vmdk. No consolidation needed. {{(pid=68217) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 817.685019] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Reconfiguring VM instance instance-00000036 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 817.685019] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-06e73cc7-5a56-479b-a01d-cb40bb1d4fb5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.700502] env[68217]: DEBUG nova.compute.manager [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 817.708723] env[68217]: DEBUG oslo_vmware.api [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 817.708723] env[68217]: value = "task-2961222" [ 817.708723] env[68217]: _type = "Task" [ 817.708723] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.719996] env[68217]: DEBUG oslo_vmware.api [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961222, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 817.785419] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 817.906942] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961216, 'name': Rename_Task, 'duration_secs': 0.664156} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 817.908150] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 817.908150] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea645535-e5d3-42e4-a229-1eab0ebdf7e0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.917825] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 817.917825] env[68217]: value = "task-2961223" [ 817.917825] env[68217]: _type = "Task" [ 817.917825] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 817.927175] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961223, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.101932] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961220, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.130727] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961221, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07264} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.131270] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 818.132171] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61ea09fd-d9bd-46c0-84ca-4ae8e22ff8a4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.156173] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Reconfiguring VM instance instance-0000003b to attach disk [datastore1] b7fe971e-353f-427c-896c-32f9de0d70e7/b7fe971e-353f-427c-896c-32f9de0d70e7.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 818.159629] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0bec39cd-4dcd-49bb-beca-51a65f54e7fa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.187088] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 818.187088] env[68217]: value = "task-2961224" [ 818.187088] env[68217]: _type = "Task" [ 818.187088] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.199292] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961224, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.201073] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d467828-dddb-48bb-953c-172cebb13faf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.222996] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c4bfd4e-7913-4813-b502-846a015f5c70 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.232436] env[68217]: DEBUG oslo_vmware.api [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961222, 'name': ReconfigVM_Task, 'duration_secs': 0.292062} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 818.256744] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Reconfigured VM instance instance-00000036 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 818.262799] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4322cbd1-8eb5-4e79-a94e-aa14b9f48ec3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.273251] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b3cad0d-7130-4419-b9c1-57ff13f3f304 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.282654] env[68217]: DEBUG oslo_vmware.api [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 818.282654] env[68217]: value = "task-2961225" [ 818.282654] env[68217]: _type = "Task" [ 818.282654] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 818.288933] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d899b8e-2d90-4499-b93f-3c7caab7eb85 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.303949] env[68217]: DEBUG nova.compute.provider_tree [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 818.308102] env[68217]: DEBUG oslo_vmware.api [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961225, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.429543] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961223, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.471638] env[68217]: DEBUG nova.network.neutron [-] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.503895] env[68217]: DEBUG nova.network.neutron [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Successfully updated port: 6f6f347d-9ee4-4b18-9c77-67f67ee873e6 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 818.588047] env[68217]: DEBUG nova.network.neutron [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Successfully created port: 9f5258ed-d071-4e53-9f7e-43d85ef5ae04 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 818.605708] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961220, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.698038] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961224, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.713898] env[68217]: DEBUG nova.compute.manager [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 818.748824] env[68217]: DEBUG nova.virt.hardware [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 818.748950] env[68217]: DEBUG nova.virt.hardware [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 818.749041] env[68217]: DEBUG nova.virt.hardware [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 818.749193] env[68217]: DEBUG nova.virt.hardware [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 818.749367] env[68217]: DEBUG nova.virt.hardware [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 818.749568] env[68217]: DEBUG nova.virt.hardware [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 818.749790] env[68217]: DEBUG nova.virt.hardware [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 818.749947] env[68217]: DEBUG nova.virt.hardware [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 818.750120] env[68217]: DEBUG nova.virt.hardware [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 818.750314] env[68217]: DEBUG nova.virt.hardware [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 818.750494] env[68217]: DEBUG nova.virt.hardware [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 818.751401] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925969db-57b3-4784-b853-5926076f4241 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.760284] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2444238-582b-4f52-9e33-dff7a5c72a49 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.794697] env[68217]: DEBUG oslo_vmware.api [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961225, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.806098] env[68217]: DEBUG nova.compute.manager [req-f348f4d8-3033-4a2d-8a61-57b292730899 req-ade6fe6c-c02e-483a-86f6-2bc77c696576 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Received event network-vif-plugged-6f6f347d-9ee4-4b18-9c77-67f67ee873e6 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 818.806098] env[68217]: DEBUG oslo_concurrency.lockutils [req-f348f4d8-3033-4a2d-8a61-57b292730899 req-ade6fe6c-c02e-483a-86f6-2bc77c696576 service nova] Acquiring lock "2e3dae16-dba3-4230-913d-7a5c3469e36e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 818.806098] env[68217]: DEBUG oslo_concurrency.lockutils [req-f348f4d8-3033-4a2d-8a61-57b292730899 req-ade6fe6c-c02e-483a-86f6-2bc77c696576 service nova] Lock "2e3dae16-dba3-4230-913d-7a5c3469e36e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 818.806098] env[68217]: DEBUG oslo_concurrency.lockutils [req-f348f4d8-3033-4a2d-8a61-57b292730899 req-ade6fe6c-c02e-483a-86f6-2bc77c696576 service nova] Lock "2e3dae16-dba3-4230-913d-7a5c3469e36e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 818.806246] env[68217]: DEBUG nova.compute.manager [req-f348f4d8-3033-4a2d-8a61-57b292730899 req-ade6fe6c-c02e-483a-86f6-2bc77c696576 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] No waiting events found dispatching network-vif-plugged-6f6f347d-9ee4-4b18-9c77-67f67ee873e6 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 818.806384] env[68217]: WARNING nova.compute.manager [req-f348f4d8-3033-4a2d-8a61-57b292730899 req-ade6fe6c-c02e-483a-86f6-2bc77c696576 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Received unexpected event network-vif-plugged-6f6f347d-9ee4-4b18-9c77-67f67ee873e6 for instance with vm_state building and task_state spawning. [ 818.806462] env[68217]: DEBUG nova.compute.manager [req-f348f4d8-3033-4a2d-8a61-57b292730899 req-ade6fe6c-c02e-483a-86f6-2bc77c696576 service nova] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Received event network-vif-deleted-2077ec71-7159-4678-b110-039046651648 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 818.806606] env[68217]: DEBUG nova.compute.manager [req-f348f4d8-3033-4a2d-8a61-57b292730899 req-ade6fe6c-c02e-483a-86f6-2bc77c696576 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Received event network-changed-6f6f347d-9ee4-4b18-9c77-67f67ee873e6 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 818.806760] env[68217]: DEBUG nova.compute.manager [req-f348f4d8-3033-4a2d-8a61-57b292730899 req-ade6fe6c-c02e-483a-86f6-2bc77c696576 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Refreshing instance network info cache due to event network-changed-6f6f347d-9ee4-4b18-9c77-67f67ee873e6. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 818.806931] env[68217]: DEBUG oslo_concurrency.lockutils [req-f348f4d8-3033-4a2d-8a61-57b292730899 req-ade6fe6c-c02e-483a-86f6-2bc77c696576 service nova] Acquiring lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 818.807065] env[68217]: DEBUG oslo_concurrency.lockutils [req-f348f4d8-3033-4a2d-8a61-57b292730899 req-ade6fe6c-c02e-483a-86f6-2bc77c696576 service nova] Acquired lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 818.807212] env[68217]: DEBUG nova.network.neutron [req-f348f4d8-3033-4a2d-8a61-57b292730899 req-ade6fe6c-c02e-483a-86f6-2bc77c696576 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Refreshing network info cache for port 6f6f347d-9ee4-4b18-9c77-67f67ee873e6 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 818.809648] env[68217]: DEBUG nova.scheduler.client.report [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 818.931697] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961223, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 818.975122] env[68217]: INFO nova.compute.manager [-] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Took 1.84 seconds to deallocate network for instance. [ 819.008298] env[68217]: DEBUG oslo_concurrency.lockutils [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Acquiring lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.107283] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961220, 'name': ReconfigVM_Task, 'duration_secs': 1.474397} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.107664] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Reconfigured VM instance instance-00000037 to attach disk [datastore1] ba39e563-3e3a-40aa-815f-760f0f37a55d/ba39e563-3e3a-40aa-815f-760f0f37a55d.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 819.108906] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c992c431-6482-4ac1-98aa-bb7881e6d743 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.117198] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 819.117198] env[68217]: value = "task-2961226" [ 819.117198] env[68217]: _type = "Task" [ 819.117198] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.128513] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961226, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.200157] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961224, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.215998] env[68217]: DEBUG oslo_vmware.rw_handles [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523b7767-9076-50f2-a6cb-64b7320bd780/disk-0.vmdk. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 819.216982] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1bbe089-a023-4b3f-8703-3e79c0147cb4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.225016] env[68217]: DEBUG oslo_vmware.rw_handles [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523b7767-9076-50f2-a6cb-64b7320bd780/disk-0.vmdk is in state: ready. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 819.225237] env[68217]: ERROR oslo_vmware.rw_handles [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523b7767-9076-50f2-a6cb-64b7320bd780/disk-0.vmdk due to incomplete transfer. 
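The ERROR record above shows the VMDK read handle for the image upload being closed while the NFC lease is still "ready", so the lease is aborted rather than completed ("Aborting lease ... due to incomplete transfer"). A minimal sketch of that close-time decision, using a hypothetical stand-in lease object rather than the real oslo.vmware rw_handles code, might look like:

    # Hedged sketch, not the actual oslo.vmware implementation: on close, the
    # NFC lease is completed only if every expected byte was transferred;
    # otherwise it is aborted, as in the ERROR record above.
    class FakeLease:
        """Hypothetical stand-in for an NFC lease handle."""
        def abort(self):
            print("lease aborted")
        def complete(self):
            print("lease completed")

    def close_read_handle(lease, bytes_read, expected_bytes):
        if bytes_read < expected_bytes:
            # Transfer stopped early (e.g. the upload was interrupted), so the
            # lease must be aborted instead of marked complete.
            lease.abort()
        else:
            lease.complete()

    # Example: expected size taken from the image_meta size logged later
    # (21318656 bytes); the bytes_read value is illustrative only.
    close_read_handle(FakeLease(), bytes_read=1024, expected_bytes=21318656)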
[ 819.225530] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-d81d917c-e006-40fd-8f00-63329627ae8c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.234248] env[68217]: DEBUG oslo_vmware.rw_handles [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/523b7767-9076-50f2-a6cb-64b7320bd780/disk-0.vmdk. {{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 819.234480] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Uploaded image 834e3301-26b6-432c-8685-944e0d978890 to the Glance image server {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 819.236639] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 819.236930] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-0504ac94-7535-43ea-ad1b-945a7e4a10ac {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.245582] env[68217]: DEBUG oslo_vmware.api [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 819.245582] env[68217]: value = "task-2961227" [ 819.245582] env[68217]: _type = "Task" [ 819.245582] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.257471] env[68217]: DEBUG oslo_vmware.api [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961227, 'name': Destroy_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.296335] env[68217]: DEBUG oslo_vmware.api [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961225, 'name': ReconfigVM_Task, 'duration_secs': 0.888181} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.296682] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594263', 'volume_id': 'd5de6532-0a92-4a7a-b66b-09e2c100b998', 'name': 'volume-d5de6532-0a92-4a7a-b66b-09e2c100b998', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '7a01c2c5-3108-4382-85c5-a5ea5e6e160c', 'attached_at': '', 'detached_at': '', 'volume_id': 'd5de6532-0a92-4a7a-b66b-09e2c100b998', 'serial': 'd5de6532-0a92-4a7a-b66b-09e2c100b998'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 819.296979] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 819.297782] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ede6e610-e670-40ed-8acb-eb2e04cf5c18 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.305626] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 819.305962] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-042e97a3-0415-49e7-9816-86d21d0e195f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.315192] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.630s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 819.315700] env[68217]: DEBUG nova.compute.manager [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 819.318804] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.032s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 819.321944] env[68217]: INFO nova.compute.claims [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 819.345310] env[68217]: DEBUG nova.network.neutron [req-f348f4d8-3033-4a2d-8a61-57b292730899 req-ade6fe6c-c02e-483a-86f6-2bc77c696576 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 819.379248] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 819.379578] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 819.379750] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleting the datastore file [datastore2] 7a01c2c5-3108-4382-85c5-a5ea5e6e160c {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 819.380038] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eef63dcf-28ae-400e-b6d8-88d4ddd70715 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.388483] env[68217]: DEBUG oslo_vmware.api [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 819.388483] env[68217]: value = "task-2961229" [ 819.388483] env[68217]: _type = "Task" [ 819.388483] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.397734] env[68217]: DEBUG oslo_vmware.api [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961229, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.432364] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961223, 'name': PowerOnVM_Task} progress is 88%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.457703] env[68217]: DEBUG nova.network.neutron [req-f348f4d8-3033-4a2d-8a61-57b292730899 req-ade6fe6c-c02e-483a-86f6-2bc77c696576 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 819.483140] env[68217]: DEBUG oslo_concurrency.lockutils [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 819.629052] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961226, 'name': Rename_Task, 'duration_secs': 0.170797} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.629052] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 819.629052] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-779eb2db-9903-494c-98f2-c2503caa6520 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.635514] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 819.635514] env[68217]: value = "task-2961230" [ 819.635514] env[68217]: _type = "Task" [ 819.635514] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.646264] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961230, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.700033] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961224, 'name': ReconfigVM_Task, 'duration_secs': 1.219115} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.700291] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Reconfigured VM instance instance-0000003b to attach disk [datastore1] b7fe971e-353f-427c-896c-32f9de0d70e7/b7fe971e-353f-427c-896c-32f9de0d70e7.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 819.700950] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8886820a-df52-4c8c-81de-340deab93868 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.709143] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 819.709143] env[68217]: value = "task-2961231" [ 819.709143] env[68217]: _type = "Task" [ 819.709143] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.718030] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961231, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.755414] env[68217]: DEBUG oslo_vmware.api [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961227, 'name': Destroy_Task, 'duration_secs': 0.373219} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.755706] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Destroyed the VM [ 819.755924] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 819.756200] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-fd476a0a-fa71-4670-be58-78891fe6c512 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.764652] env[68217]: DEBUG oslo_vmware.api [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 819.764652] env[68217]: value = "task-2961232" [ 819.764652] env[68217]: _type = "Task" [ 819.764652] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 819.775824] env[68217]: DEBUG oslo_vmware.api [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961232, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 819.827265] env[68217]: DEBUG nova.compute.utils [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 819.831803] env[68217]: DEBUG nova.compute.manager [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 819.832012] env[68217]: DEBUG nova.network.neutron [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 819.903411] env[68217]: DEBUG oslo_vmware.api [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961229, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148691} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.903626] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 819.903802] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 819.904097] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 819.904203] env[68217]: INFO nova.compute.manager [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Took 2.90 seconds to destroy the instance on the hypervisor. 
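The records above trace the teardown of instance 7a01c2c5-3108-4382-85c5-a5ea5e6e160c: the Cinder-backed VMDK is detached via ReconfigVM_Task, the VM is unregistered, its datastore directory is deleted, and only then is the network deallocated. A rough sketch of that ordering, with placeholder step functions rather than Nova or vSphere calls, might look like:

    # Hedged sketch of the teardown ordering implied by the preceding records;
    # each placeholder corresponds to one task/record group in the log.
    def destroy_instance():
        steps = [
            ("detach volume (ReconfigVM_Task)", lambda: None),
            ("unregister VM (UnregisterVM)", lambda: None),
            ("delete datastore files (DeleteDatastoreFile_Task)", lambda: None),
            ("deallocate network (neutron)", lambda: None),
        ]
        for name, step in steps:
            print("step:", name)
            step()

    destroy_instance()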
[ 819.904440] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 819.906885] env[68217]: DEBUG nova.compute.manager [-] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 819.906885] env[68217]: DEBUG nova.network.neutron [-] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 819.927290] env[68217]: DEBUG nova.policy [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '011d38e070744a3fb3c515d5e669ed22', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9ba7843e6144cd1877b48bc40cd64f3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 819.935290] env[68217]: DEBUG oslo_vmware.api [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961223, 'name': PowerOnVM_Task, 'duration_secs': 1.633222} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 819.935738] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 819.936094] env[68217]: INFO nova.compute.manager [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Took 11.09 seconds to spawn the instance on the hypervisor. 
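The repeated "Task: {'id': task-..., 'name': PowerOnVM_Task} progress is N%" records above come from polling a vCenter task until it finishes (here after 1.633222 s). A minimal sketch of that polling pattern, with a hypothetical get_task_info callable rather than the real oslo.vmware API, might look like:

    # Hedged sketch of the wait-for-task polling loop suggested by the log;
    # the task-info dict shape here is an assumption, not oslo.vmware's.
    import time

    def wait_for_task(get_task_info, poll_interval=0.5):
        start = time.monotonic()
        while True:
            info = get_task_info()  # e.g. {'state': 'running', 'progress': 66}
            if info['state'] == 'success':
                # Analogous to the duration_secs reported on completion.
                return time.monotonic() - start
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            time.sleep(poll_interval)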
[ 819.936412] env[68217]: DEBUG nova.compute.manager [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 819.938242] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dac0a9f8-d595-4f9e-a118-66a30a444e5e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 819.960123] env[68217]: DEBUG oslo_concurrency.lockutils [req-f348f4d8-3033-4a2d-8a61-57b292730899 req-ade6fe6c-c02e-483a-86f6-2bc77c696576 service nova] Releasing lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 819.960470] env[68217]: DEBUG oslo_concurrency.lockutils [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Acquired lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 819.960630] env[68217]: DEBUG nova.network.neutron [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 820.148553] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961230, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.219826] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961231, 'name': Rename_Task, 'duration_secs': 0.295555} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.220124] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 820.220378] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4982a2e6-e6f9-44cb-8fb6-6c76c1850218 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.227618] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 820.227618] env[68217]: value = "task-2961233" [ 820.227618] env[68217]: _type = "Task" [ 820.227618] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 820.237474] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961233, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.274969] env[68217]: DEBUG oslo_vmware.api [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961232, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.333106] env[68217]: DEBUG nova.compute.manager [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 820.460685] env[68217]: INFO nova.compute.manager [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Took 41.39 seconds to build instance. [ 820.539495] env[68217]: DEBUG nova.network.neutron [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 820.651321] env[68217]: DEBUG oslo_vmware.api [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961230, 'name': PowerOnVM_Task, 'duration_secs': 0.536352} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.651905] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 820.652147] env[68217]: DEBUG nova.compute.manager [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 820.653251] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5cca65-6bc0-4284-b54f-3bd5cfe8d3b8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.741458] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961233, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 820.773123] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa2198b5-2d01-4994-8891-1271c6651d95 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.779273] env[68217]: DEBUG oslo_vmware.api [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961232, 'name': RemoveSnapshot_Task, 'duration_secs': 0.899501} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 820.779866] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 820.780113] env[68217]: INFO nova.compute.manager [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Took 14.33 seconds to snapshot the instance on the hypervisor. [ 820.785562] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c902b8f-778b-49c2-85d4-c560ee284187 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.819917] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa16761-bb16-46f7-aade-3b4582a831b7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.832376] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa3fe76c-aaec-45bb-84ca-dbdd1a5ac9f1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.850147] env[68217]: DEBUG nova.compute.provider_tree [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.962855] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2d021367-b596-44a7-b693-06c007014c01 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "23366029-e754-49dc-ba56-7a0d92232d81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.594s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 820.983831] env[68217]: DEBUG nova.network.neutron [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Updating instance_info_cache with network_info: [{"id": "6f6f347d-9ee4-4b18-9c77-67f67ee873e6", "address": "fa:16:3e:25:6f:1a", "network": {"id": "2a4968db-54c5-475c-9fc7-56d5f6441110", 
"bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-553739001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a6348b1f20794ee9a016d409eee576e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f6f347d-9e", "ovs_interfaceid": "6f6f347d-9ee4-4b18-9c77-67f67ee873e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.076099] env[68217]: DEBUG nova.network.neutron [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Successfully created port: 04299da0-d810-4014-b79f-1ac8a45e1a8f {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 821.087868] env[68217]: DEBUG oslo_concurrency.lockutils [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "41d279f2-477b-44b2-9eb9-7b782c9c890f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.089045] env[68217]: DEBUG oslo_concurrency.lockutils [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "41d279f2-477b-44b2-9eb9-7b782c9c890f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.089045] env[68217]: DEBUG oslo_concurrency.lockutils [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "41d279f2-477b-44b2-9eb9-7b782c9c890f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.089045] env[68217]: DEBUG oslo_concurrency.lockutils [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "41d279f2-477b-44b2-9eb9-7b782c9c890f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.089045] env[68217]: DEBUG oslo_concurrency.lockutils [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "41d279f2-477b-44b2-9eb9-7b782c9c890f-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.091590] env[68217]: INFO nova.compute.manager [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Terminating instance [ 821.147058] env[68217]: DEBUG nova.network.neutron [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Successfully updated port: 9f5258ed-d071-4e53-9f7e-43d85ef5ae04 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 821.170940] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.242956] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961233, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.244534] env[68217]: DEBUG nova.network.neutron [-] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.285436] env[68217]: DEBUG nova.compute.manager [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Instance disappeared during snapshot {{(pid=68217) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4595}} [ 821.296470] env[68217]: DEBUG nova.compute.manager [None req-4641cab7-9040-4a33-8741-ea5ab0bb0569 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Image not found during clean up 834e3301-26b6-432c-8685-944e0d978890 {{(pid=68217) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4601}} [ 821.334505] env[68217]: DEBUG nova.compute.manager [req-25c5e658-fd44-4c15-ab15-5f67b1b9c44c req-de887e80-ce31-4c00-bb0b-8b47bde4aa8e service nova] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Received event network-vif-plugged-9f5258ed-d071-4e53-9f7e-43d85ef5ae04 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 821.334765] env[68217]: DEBUG oslo_concurrency.lockutils [req-25c5e658-fd44-4c15-ab15-5f67b1b9c44c req-de887e80-ce31-4c00-bb0b-8b47bde4aa8e service nova] Acquiring lock "149bd497-4ee6-4ca2-9d18-b276e773aedf-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 821.334930] env[68217]: DEBUG oslo_concurrency.lockutils [req-25c5e658-fd44-4c15-ab15-5f67b1b9c44c req-de887e80-ce31-4c00-bb0b-8b47bde4aa8e service nova] Lock "149bd497-4ee6-4ca2-9d18-b276e773aedf-events" acquired by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 821.335103] env[68217]: DEBUG oslo_concurrency.lockutils [req-25c5e658-fd44-4c15-ab15-5f67b1b9c44c req-de887e80-ce31-4c00-bb0b-8b47bde4aa8e service nova] Lock "149bd497-4ee6-4ca2-9d18-b276e773aedf-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.335311] env[68217]: DEBUG nova.compute.manager [req-25c5e658-fd44-4c15-ab15-5f67b1b9c44c req-de887e80-ce31-4c00-bb0b-8b47bde4aa8e service nova] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] No waiting events found dispatching network-vif-plugged-9f5258ed-d071-4e53-9f7e-43d85ef5ae04 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 821.335489] env[68217]: WARNING nova.compute.manager [req-25c5e658-fd44-4c15-ab15-5f67b1b9c44c req-de887e80-ce31-4c00-bb0b-8b47bde4aa8e service nova] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Received unexpected event network-vif-plugged-9f5258ed-d071-4e53-9f7e-43d85ef5ae04 for instance with vm_state building and task_state spawning. [ 821.335646] env[68217]: DEBUG nova.compute.manager [req-25c5e658-fd44-4c15-ab15-5f67b1b9c44c req-de887e80-ce31-4c00-bb0b-8b47bde4aa8e service nova] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Received event network-vif-deleted-e14e7381-56d1-46d7-ac1f-e49cea827394 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 821.353437] env[68217]: DEBUG nova.compute.manager [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 821.356810] env[68217]: DEBUG nova.scheduler.client.report [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 821.379195] env[68217]: DEBUG nova.virt.hardware [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 821.379609] env[68217]: DEBUG nova.virt.hardware [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 821.379861] env[68217]: DEBUG nova.virt.hardware [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 821.380084] env[68217]: DEBUG nova.virt.hardware [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 821.380238] env[68217]: DEBUG nova.virt.hardware [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 821.380384] env[68217]: DEBUG nova.virt.hardware [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 821.380588] env[68217]: DEBUG nova.virt.hardware [None 
req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 821.380746] env[68217]: DEBUG nova.virt.hardware [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 821.380909] env[68217]: DEBUG nova.virt.hardware [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 821.381083] env[68217]: DEBUG nova.virt.hardware [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 821.381256] env[68217]: DEBUG nova.virt.hardware [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 821.382575] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d16d057f-e144-4c38-b602-650833a94dd4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.392426] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0467d5bf-015d-42b4-9796-660ef6d1a999 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.486957] env[68217]: DEBUG oslo_concurrency.lockutils [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Releasing lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 821.488081] env[68217]: DEBUG nova.compute.manager [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Instance network_info: |[{"id": "6f6f347d-9ee4-4b18-9c77-67f67ee873e6", "address": "fa:16:3e:25:6f:1a", "network": {"id": "2a4968db-54c5-475c-9fc7-56d5f6441110", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-553739001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a6348b1f20794ee9a016d409eee576e3", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f6f347d-9e", "ovs_interfaceid": "6f6f347d-9ee4-4b18-9c77-67f67ee873e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 821.488612] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:6f:1a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b83383f-ed7a-4efd-aef7-aa8c15649d07', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6f6f347d-9ee4-4b18-9c77-67f67ee873e6', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 821.500498] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Creating folder: Project (a6348b1f20794ee9a016d409eee576e3). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 821.500823] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-aecea0a8-e1c9-4170-bf73-5146e3b592f8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.518453] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Created folder: Project (a6348b1f20794ee9a016d409eee576e3) in parent group-v594094. [ 821.518657] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Creating folder: Instances. Parent ref: group-v594270. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 821.518914] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-de3eb516-c451-4390-aea6-efe5f1c0be39 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.534113] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Created folder: Instances in parent group-v594270. [ 821.534370] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 821.534574] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 821.534785] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-13b7f5c5-e952-4004-a78d-c7e017de88d4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.555716] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 821.555716] env[68217]: value = "task-2961236" [ 821.555716] env[68217]: _type = "Task" [ 821.555716] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.567757] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961236, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.596868] env[68217]: DEBUG nova.compute.manager [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 821.597125] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 821.598331] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2677d6ad-222d-46f2-bbff-f43795c77853 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.607302] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 821.607572] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fcbf7de4-29dd-4545-813e-61a08be29120 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.649916] env[68217]: DEBUG oslo_concurrency.lockutils [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "refresh_cache-149bd497-4ee6-4ca2-9d18-b276e773aedf" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.650480] env[68217]: DEBUG oslo_concurrency.lockutils [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquired lock "refresh_cache-149bd497-4ee6-4ca2-9d18-b276e773aedf" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 821.650480] env[68217]: DEBUG nova.network.neutron [None 
req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 821.685505] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 821.685738] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 821.685926] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Deleting the datastore file [datastore2] 41d279f2-477b-44b2-9eb9-7b782c9c890f {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 821.686282] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6f83e51b-f328-4d39-b6ce-2e2ba436d38d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.696528] env[68217]: DEBUG oslo_vmware.api [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 821.696528] env[68217]: value = "task-2961238" [ 821.696528] env[68217]: _type = "Task" [ 821.696528] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 821.706961] env[68217]: DEBUG oslo_vmware.api [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961238, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 821.741764] env[68217]: DEBUG oslo_vmware.api [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961233, 'name': PowerOnVM_Task, 'duration_secs': 1.034024} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 821.742142] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 821.742449] env[68217]: INFO nova.compute.manager [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Took 10.31 seconds to spawn the instance on the hypervisor. 
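Editor's note: the records above (CreateVM_Task, DeleteDatastoreFile_Task, PowerOnVM_Task) all follow the same wait_for_task / _poll_task rhythm: submit a vCenter task, then repeatedly poll it and log its progress until it completes. The following is a minimal generic sketch of that poll-until-done pattern, written as an illustration only; it is not the oslo.vmware implementation, and the `poll` callable and its (state, progress) return shape are assumptions for the example.

```python
import time


class TaskTimeout(Exception):
    """Raised when a task does not finish within the allowed time."""


def wait_for_task(poll, interval=0.5, timeout=300.0):
    """Poll `poll()` until it reports completion.

    `poll` is a caller-supplied callable returning (state, progress), where
    state is one of 'running', 'success', 'error'.  This only mirrors the
    poll-log-repeat rhythm visible in the records above; the real driver
    delegates this to oslo.vmware's task-polling loop.
    """
    deadline = time.monotonic() + timeout
    while True:
        state, progress = poll()
        print(f"progress is {progress}%")   # analogue of the _poll_task log line
        if state == "success":
            return
        if state == "error":
            raise RuntimeError("task failed")
        if time.monotonic() > deadline:
            raise TaskTimeout("gave up waiting for task")
        time.sleep(interval)
```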
[ 821.742967] env[68217]: DEBUG nova.compute.manager [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 821.744144] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761fcc7e-6730-40a5-81e3-694d2194a243 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 821.755216] env[68217]: INFO nova.compute.manager [-] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Took 1.85 seconds to deallocate network for instance. [ 821.861485] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.543s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 821.861873] env[68217]: DEBUG nova.compute.manager [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 821.864624] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 19.087s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.071973] env[68217]: DEBUG oslo_concurrency.lockutils [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "d3468ec2-6548-400a-b247-a6ab1156cab5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.072677] env[68217]: DEBUG oslo_concurrency.lockutils [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "d3468ec2-6548-400a-b247-a6ab1156cab5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.077568] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961236, 'name': CreateVM_Task, 'duration_secs': 0.401667} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.078220] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 822.078428] env[68217]: DEBUG oslo_concurrency.lockutils [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.079721] env[68217]: DEBUG oslo_concurrency.lockutils [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.079721] env[68217]: DEBUG oslo_concurrency.lockutils [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 822.079721] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60c0928d-17c1-492a-b276-09d458f1b1bf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.085244] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for the task: (returnval){ [ 822.085244] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522fa565-db77-37ad-cbd9-48b42d763e4c" [ 822.085244] env[68217]: _type = "Task" [ 822.085244] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.095419] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522fa565-db77-37ad-cbd9-48b42d763e4c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.190536] env[68217]: DEBUG nova.network.neutron [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 822.206825] env[68217]: DEBUG oslo_vmware.api [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961238, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.194866} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.207207] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 822.207411] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 822.207599] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 822.207772] env[68217]: INFO nova.compute.manager [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Took 0.61 seconds to destroy the instance on the hypervisor. [ 822.208019] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 822.208222] env[68217]: DEBUG nova.compute.manager [-] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 822.208321] env[68217]: DEBUG nova.network.neutron [-] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 822.264921] env[68217]: INFO nova.compute.manager [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Took 43.07 seconds to build instance. [ 822.303403] env[68217]: INFO nova.compute.manager [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Took 0.55 seconds to detach 1 volumes for instance. 
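Editor's note: for instance 41d279f2 the records above show the teardown ordering: the VM is unregistered, its datastore contents are deleted, the instance is reported destroyed, and only then is the network deallocated. The sketch below captures just that ordering with caller-supplied callables; the helper names are hypothetical stand-ins, and only the sequence is taken from the log, not the actual nova.virt.vmwareapi code.

```python
def destroy_instance(unregister_vm, delete_datastore_files, deallocate_network):
    """Tear an instance down in the order the records above show.

    Each argument is a hypothetical callable standing in for the step named
    in the corresponding log record.
    """
    unregister_vm()            # "Unregistered the VM"
    delete_datastore_files()   # "Deleted contents of the VM from datastore datastore2"
    deallocate_network()       # "Deallocating network for instance"


# Usage example with no-op steps:
# destroy_instance(lambda: print("unregister"),
#                  lambda: print("delete files"),
#                  lambda: print("deallocate network"))
```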
[ 822.368244] env[68217]: DEBUG nova.compute.utils [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 822.372785] env[68217]: INFO nova.compute.claims [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 822.376421] env[68217]: DEBUG nova.compute.manager [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 822.376577] env[68217]: DEBUG nova.network.neutron [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 822.509713] env[68217]: DEBUG nova.policy [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd621db3bddfd4bd395b5194bbb7f2bd6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd5183f68359f454dbd74f1e475288dd7', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 822.550838] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "ba39e563-3e3a-40aa-815f-760f0f37a55d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.551107] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "ba39e563-3e3a-40aa-815f-760f0f37a55d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.551326] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "ba39e563-3e3a-40aa-815f-760f0f37a55d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.551501] env[68217]: DEBUG oslo_concurrency.lockutils [None 
req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "ba39e563-3e3a-40aa-815f-760f0f37a55d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 822.551667] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "ba39e563-3e3a-40aa-815f-760f0f37a55d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.554196] env[68217]: INFO nova.compute.manager [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Terminating instance [ 822.575461] env[68217]: DEBUG nova.compute.manager [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 822.596244] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522fa565-db77-37ad-cbd9-48b42d763e4c, 'name': SearchDatastore_Task, 'duration_secs': 0.010817} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 822.596543] env[68217]: DEBUG oslo_concurrency.lockutils [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 822.596780] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 822.597019] env[68217]: DEBUG oslo_concurrency.lockutils [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 822.597179] env[68217]: DEBUG oslo_concurrency.lockutils [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 822.597350] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 822.597886] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-346bf5ad-3532-4aef-95c8-4626fa8a01c9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.608015] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 822.608217] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 822.610792] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d51bb6e9-b496-4e63-bc64-46ec64e22701 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.617193] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for the task: (returnval){ [ 822.617193] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52065490-2a3b-863d-9633-841363f8ba2b" [ 822.617193] env[68217]: _type = "Task" [ 822.617193] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 822.625164] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52065490-2a3b-863d-9633-841363f8ba2b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 822.678122] env[68217]: DEBUG nova.network.neutron [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Updating instance_info_cache with network_info: [{"id": "9f5258ed-d071-4e53-9f7e-43d85ef5ae04", "address": "fa:16:3e:d1:a3:54", "network": {"id": "d699b565-498b-4788-9c08-0e23871a9180", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-728170078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bf05c5ad8574e0f858cd2261af9ef24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d669e36a-5c9d-4fa4-92c8-90e7cb814262", "external-id": "nsx-vlan-transportzone-589", "segmentation_id": 589, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f5258ed-d0", "ovs_interfaceid": "9f5258ed-d071-4e53-9f7e-43d85ef5ae04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.766969] env[68217]: DEBUG oslo_concurrency.lockutils [None req-107a63ed-2c95-4f93-a210-d9d865df3051 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "b7fe971e-353f-427c-896c-32f9de0d70e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.598s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 822.811748] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 
tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 822.877368] env[68217]: DEBUG nova.compute.manager [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 822.883482] env[68217]: INFO nova.compute.resource_tracker [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Updating resource usage from migration e43f38fa-6f4b-4977-b7ad-6a2e6c8e9fb3 [ 823.057916] env[68217]: DEBUG nova.compute.manager [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 823.058258] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 823.059483] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bca6b7d-bf7f-49ff-b491-2670571c5bdf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.073019] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 823.073019] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67cf59a0-d3c8-4d4c-97ac-65074fa7380c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.078193] env[68217]: DEBUG oslo_vmware.api [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 823.078193] env[68217]: value = "task-2961239" [ 823.078193] env[68217]: _type = "Task" [ 823.078193] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.100760] env[68217]: DEBUG oslo_vmware.api [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961239, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.108676] env[68217]: DEBUG oslo_concurrency.lockutils [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.126993] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52065490-2a3b-863d-9633-841363f8ba2b, 'name': SearchDatastore_Task, 'duration_secs': 0.0104} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.128044] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2a821335-bbb0-4222-8d43-010463de801c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.135720] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for the task: (returnval){ [ 823.135720] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5294c0ca-6582-e8c9-cd76-a3d1fd1d5792" [ 823.135720] env[68217]: _type = "Task" [ 823.135720] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.148755] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5294c0ca-6582-e8c9-cd76-a3d1fd1d5792, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.184406] env[68217]: DEBUG oslo_concurrency.lockutils [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Releasing lock "refresh_cache-149bd497-4ee6-4ca2-9d18-b276e773aedf" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.184758] env[68217]: DEBUG nova.compute.manager [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Instance network_info: |[{"id": "9f5258ed-d071-4e53-9f7e-43d85ef5ae04", "address": "fa:16:3e:d1:a3:54", "network": {"id": "d699b565-498b-4788-9c08-0e23871a9180", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-728170078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bf05c5ad8574e0f858cd2261af9ef24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d669e36a-5c9d-4fa4-92c8-90e7cb814262", "external-id": "nsx-vlan-transportzone-589", "segmentation_id": 589, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f5258ed-d0", "ovs_interfaceid": "9f5258ed-d071-4e53-9f7e-43d85ef5ae04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 823.185815] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d1:a3:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd669e36a-5c9d-4fa4-92c8-90e7cb814262', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9f5258ed-d071-4e53-9f7e-43d85ef5ae04', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 823.193891] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 823.196501] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 823.198149] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d9199a1a-c8c2-4f11-a841-df1cf5f71b2f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.224620] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 823.224620] env[68217]: value = "task-2961240" [ 823.224620] env[68217]: _type = "Task" [ 823.224620] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.235762] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961240, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.309679] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d46fd3f-cc11-405b-a0eb-c819e873959c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.318275] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee6a11a-2306-47b4-9b86-8ead2fc44741 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.351901] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8faefb5-dc98-4fdc-bb34-ac874a360468 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.360166] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35fe13f2-97e1-4abc-b875-e608cde23445 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.382419] env[68217]: DEBUG nova.compute.provider_tree [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 823.388558] env[68217]: INFO nova.virt.block_device [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Booting with volume 19dc7d9f-d50d-45f0-8776-4c28a20691ad at /dev/sda [ 823.446865] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ee4bedb1-6186-4d12-849e-9cbb5fb6f392 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.459265] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0149f10-a991-4ae0-a553-51ad23b6b323 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.474625] env[68217]: DEBUG nova.network.neutron [-] [instance: 
41d279f2-477b-44b2-9eb9-7b782c9c890f] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.485838] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "2612f6fc-a43f-4011-8a09-51088a49371a" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 823.486107] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "2612f6fc-a43f-4011-8a09-51088a49371a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 823.510172] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a4282f64-d5a9-4b9b-b046-84aaafc0b10f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.525777] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a507c033-e32c-44c8-951c-c874d3eaa284 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.569816] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf31271-629f-422a-b41f-c9bee9cc36d2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.577710] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3731d66f-9530-4b3f-8fb9-f853ea5f60cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.593213] env[68217]: DEBUG oslo_vmware.api [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961239, 'name': PowerOffVM_Task, 'duration_secs': 0.43378} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.593213] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 823.593213] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 823.593213] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a37efe5-33eb-43af-8268-66c597ae4a87 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.599111] env[68217]: DEBUG nova.virt.block_device [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Updating existing volume attachment record: df8b882e-ffb3-4a6d-a189-cfb5d4753e7f {{(pid=68217) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 823.652073] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5294c0ca-6582-e8c9-cd76-a3d1fd1d5792, 'name': SearchDatastore_Task, 'duration_secs': 0.012732} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.652575] env[68217]: DEBUG oslo_concurrency.lockutils [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 823.652984] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 2e3dae16-dba3-4230-913d-7a5c3469e36e/2e3dae16-dba3-4230-913d-7a5c3469e36e.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 823.653458] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b3fc8ff5-d6a1-4dbf-9f22-3e3301a64d81 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.663019] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for the task: (returnval){ [ 823.663019] env[68217]: value = "task-2961242" [ 823.663019] env[68217]: _type = "Task" [ 823.663019] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.675240] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961242, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.700182] env[68217]: DEBUG nova.network.neutron [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Successfully updated port: 04299da0-d810-4014-b79f-1ac8a45e1a8f {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 823.700182] env[68217]: DEBUG nova.network.neutron [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Successfully created port: be6bc9f0-6147-4638-b306-5affbda64885 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 823.740033] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961240, 'name': CreateVM_Task, 'duration_secs': 0.509995} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 823.740033] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 823.740033] env[68217]: DEBUG oslo_concurrency.lockutils [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.740033] env[68217]: DEBUG oslo_concurrency.lockutils [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.740033] env[68217]: DEBUG oslo_concurrency.lockutils [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 823.740359] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fc84ef0-c681-4e23-8119-00f56347b2d7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.747105] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 823.747105] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5251fbbe-8fc2-0b68-7cff-1aba1d789a1f" [ 823.747105] env[68217]: _type = "Task" [ 823.747105] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.756159] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5251fbbe-8fc2-0b68-7cff-1aba1d789a1f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.810022] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 823.810022] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 823.810022] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleting the datastore file [datastore1] ba39e563-3e3a-40aa-815f-760f0f37a55d {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 823.810022] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6bf90bd5-4cc2-480f-9c7a-2d3912f6d637 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.819023] env[68217]: DEBUG oslo_vmware.api [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 823.819023] env[68217]: value = "task-2961243" [ 823.819023] env[68217]: _type = "Task" [ 823.819023] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 823.828451] env[68217]: DEBUG oslo_vmware.api [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961243, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 823.839216] env[68217]: DEBUG nova.compute.manager [req-8e3526ca-b8b8-4013-b9ae-9e1e96edbd10 req-8526893a-ae99-403e-b113-55ce3116014a service nova] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Received event network-changed-9f5258ed-d071-4e53-9f7e-43d85ef5ae04 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 823.839500] env[68217]: DEBUG nova.compute.manager [req-8e3526ca-b8b8-4013-b9ae-9e1e96edbd10 req-8526893a-ae99-403e-b113-55ce3116014a service nova] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Refreshing instance network info cache due to event network-changed-9f5258ed-d071-4e53-9f7e-43d85ef5ae04. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 823.839824] env[68217]: DEBUG oslo_concurrency.lockutils [req-8e3526ca-b8b8-4013-b9ae-9e1e96edbd10 req-8526893a-ae99-403e-b113-55ce3116014a service nova] Acquiring lock "refresh_cache-149bd497-4ee6-4ca2-9d18-b276e773aedf" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 823.840276] env[68217]: DEBUG oslo_concurrency.lockutils [req-8e3526ca-b8b8-4013-b9ae-9e1e96edbd10 req-8526893a-ae99-403e-b113-55ce3116014a service nova] Acquired lock "refresh_cache-149bd497-4ee6-4ca2-9d18-b276e773aedf" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 823.840716] env[68217]: DEBUG nova.network.neutron [req-8e3526ca-b8b8-4013-b9ae-9e1e96edbd10 req-8526893a-ae99-403e-b113-55ce3116014a service nova] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Refreshing network info cache for port 9f5258ed-d071-4e53-9f7e-43d85ef5ae04 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 823.885550] env[68217]: DEBUG nova.scheduler.client.report [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 823.977212] env[68217]: INFO nova.compute.manager [-] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Took 1.77 seconds to deallocate network for instance. [ 823.989624] env[68217]: DEBUG nova.compute.manager [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 824.180501] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961242, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.204182] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "refresh_cache-aa4b9cc8-d0dc-4a0b-9eec-dceace695df9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.204352] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired lock "refresh_cache-aa4b9cc8-d0dc-4a0b-9eec-dceace695df9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.204520] env[68217]: DEBUG nova.network.neutron [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 824.259992] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5251fbbe-8fc2-0b68-7cff-1aba1d789a1f, 'name': SearchDatastore_Task, 'duration_secs': 0.012901} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.260325] env[68217]: DEBUG oslo_concurrency.lockutils [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 824.260561] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 824.260789] env[68217]: DEBUG oslo_concurrency.lockutils [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.260933] env[68217]: DEBUG oslo_concurrency.lockutils [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.261120] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 824.261386] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c62dea71-b83c-495d-8ddb-ede1bdd54c30 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.272828] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 824.273122] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 824.273870] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33334544-b240-4c04-95b0-f99317b6a5dc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.281382] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 824.281382] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524ce9c3-5a0b-9e94-3141-efe853007cff" [ 824.281382] env[68217]: _type = "Task" [ 824.281382] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.290612] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524ce9c3-5a0b-9e94-3141-efe853007cff, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.330208] env[68217]: DEBUG oslo_vmware.api [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961243, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.456736} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.330436] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 824.330639] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 824.330813] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 824.330979] env[68217]: INFO nova.compute.manager [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Took 1.27 seconds to destroy the instance on the hypervisor. [ 824.331235] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 824.331421] env[68217]: DEBUG nova.compute.manager [-] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 824.331517] env[68217]: DEBUG nova.network.neutron [-] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 824.390627] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.526s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 824.390938] env[68217]: INFO nova.compute.manager [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Migrating [ 824.391107] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 824.391247] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquired lock "compute-rpcapi-router" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 824.392457] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.875s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 824.394057] env[68217]: INFO nova.compute.claims [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 824.484616] env[68217]: DEBUG oslo_concurrency.lockutils [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.517189] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 824.680023] env[68217]: DEBUG oslo_vmware.api [None 
req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961242, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.538713} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.680437] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 2e3dae16-dba3-4230-913d-7a5c3469e36e/2e3dae16-dba3-4230-913d-7a5c3469e36e.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 824.680689] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 824.681124] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-17c20e5e-d3ef-46c7-b7a5-9b706307abd0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.689618] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for the task: (returnval){ [ 824.689618] env[68217]: value = "task-2961244" [ 824.689618] env[68217]: _type = "Task" [ 824.689618] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.702620] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961244, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.793148] env[68217]: DEBUG nova.network.neutron [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 824.805494] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524ce9c3-5a0b-9e94-3141-efe853007cff, 'name': SearchDatastore_Task, 'duration_secs': 0.013451} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 824.806336] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1ad3b56-3114-4099-aa54-5d36d79e9ca2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.813548] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 824.813548] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ea9b7c-2d9b-b216-5359-43b4520e651e" [ 824.813548] env[68217]: _type = "Task" [ 824.813548] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 824.822587] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ea9b7c-2d9b-b216-5359-43b4520e651e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 824.891685] env[68217]: DEBUG nova.network.neutron [req-8e3526ca-b8b8-4013-b9ae-9e1e96edbd10 req-8526893a-ae99-403e-b113-55ce3116014a service nova] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Updated VIF entry in instance network info cache for port 9f5258ed-d071-4e53-9f7e-43d85ef5ae04. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 824.892040] env[68217]: DEBUG nova.network.neutron [req-8e3526ca-b8b8-4013-b9ae-9e1e96edbd10 req-8526893a-ae99-403e-b113-55ce3116014a service nova] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Updating instance_info_cache with network_info: [{"id": "9f5258ed-d071-4e53-9f7e-43d85ef5ae04", "address": "fa:16:3e:d1:a3:54", "network": {"id": "d699b565-498b-4788-9c08-0e23871a9180", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-728170078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bf05c5ad8574e0f858cd2261af9ef24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d669e36a-5c9d-4fa4-92c8-90e7cb814262", "external-id": "nsx-vlan-transportzone-589", "segmentation_id": 589, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9f5258ed-d0", "ovs_interfaceid": "9f5258ed-d071-4e53-9f7e-43d85ef5ae04", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 824.900424] env[68217]: INFO nova.compute.rpcapi [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Automatically selected compute RPC version 6.4 from minimum 
service version 69 [ 824.900927] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Releasing lock "compute-rpcapi-router" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.081845] env[68217]: DEBUG nova.network.neutron [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Updating instance_info_cache with network_info: [{"id": "04299da0-d810-4014-b79f-1ac8a45e1a8f", "address": "fa:16:3e:6f:9d:be", "network": {"id": "1800eb48-065a-40ff-aa65-26727eaad0cb", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-419697147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9ba7843e6144cd1877b48bc40cd64f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04299da0-d8", "ovs_interfaceid": "04299da0-d810-4014-b79f-1ac8a45e1a8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.203289] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961244, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071925} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.205826] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 825.206930] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbcfd666-1ba8-43ed-b080-95c66c66bdb5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.233118] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 2e3dae16-dba3-4230-913d-7a5c3469e36e/2e3dae16-dba3-4230-913d-7a5c3469e36e.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 825.236185] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-46fb1f37-1fd6-44d1-af35-78bdadbf882d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.259132] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for the task: (returnval){ [ 825.259132] env[68217]: value = "task-2961245" [ 825.259132] env[68217]: _type = "Task" [ 825.259132] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.271629] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961245, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.324490] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ea9b7c-2d9b-b216-5359-43b4520e651e, 'name': SearchDatastore_Task, 'duration_secs': 0.011752} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.324801] env[68217]: DEBUG oslo_concurrency.lockutils [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.325100] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 149bd497-4ee6-4ca2-9d18-b276e773aedf/149bd497-4ee6-4ca2-9d18-b276e773aedf.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 825.325392] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f1725e7-ed0a-41bf-a95a-0f5cec9add7d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.334954] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 825.334954] env[68217]: value = "task-2961246" [ 825.334954] env[68217]: _type = "Task" [ 825.334954] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.345446] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961246, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.370129] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a570ad72-f4f7-4333-8066-f4829f477c67 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.379262] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d369f7f-96a0-4585-ac8c-f416a3c590f3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.411772] env[68217]: DEBUG oslo_concurrency.lockutils [req-8e3526ca-b8b8-4013-b9ae-9e1e96edbd10 req-8526893a-ae99-403e-b113-55ce3116014a service nova] Releasing lock "refresh_cache-149bd497-4ee6-4ca2-9d18-b276e773aedf" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.412169] env[68217]: DEBUG nova.compute.manager [req-8e3526ca-b8b8-4013-b9ae-9e1e96edbd10 req-8526893a-ae99-403e-b113-55ce3116014a service nova] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Received event network-vif-deleted-e5987781-918b-4d17-8151-7b4661f8b9d3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 825.412338] env[68217]: DEBUG nova.compute.manager [req-8e3526ca-b8b8-4013-b9ae-9e1e96edbd10 req-8526893a-ae99-403e-b113-55ce3116014a service nova] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Received event network-vif-plugged-04299da0-d810-4014-b79f-1ac8a45e1a8f {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 825.412565] env[68217]: DEBUG oslo_concurrency.lockutils [req-8e3526ca-b8b8-4013-b9ae-9e1e96edbd10 req-8526893a-ae99-403e-b113-55ce3116014a service nova] Acquiring lock "aa4b9cc8-d0dc-4a0b-9eec-dceace695df9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 825.412789] env[68217]: DEBUG oslo_concurrency.lockutils [req-8e3526ca-b8b8-4013-b9ae-9e1e96edbd10 req-8526893a-ae99-403e-b113-55ce3116014a service nova] Lock "aa4b9cc8-d0dc-4a0b-9eec-dceace695df9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 825.413186] env[68217]: DEBUG oslo_concurrency.lockutils [req-8e3526ca-b8b8-4013-b9ae-9e1e96edbd10 req-8526893a-ae99-403e-b113-55ce3116014a service nova] Lock "aa4b9cc8-d0dc-4a0b-9eec-dceace695df9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 825.413186] env[68217]: DEBUG nova.compute.manager [req-8e3526ca-b8b8-4013-b9ae-9e1e96edbd10 req-8526893a-ae99-403e-b113-55ce3116014a service nova] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] No waiting events found dispatching network-vif-plugged-04299da0-d810-4014-b79f-1ac8a45e1a8f {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 825.413303] env[68217]: WARNING nova.compute.manager [req-8e3526ca-b8b8-4013-b9ae-9e1e96edbd10 req-8526893a-ae99-403e-b113-55ce3116014a service nova] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Received unexpected event 
network-vif-plugged-04299da0-d810-4014-b79f-1ac8a45e1a8f for instance with vm_state building and task_state spawning. [ 825.415438] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70091371-194e-4c8b-92cb-7573ed37d273 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.424141] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51a6d70d-ef95-4053-921b-e6c692762004 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.429873] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "refresh_cache-bd62c682-24f2-4559-887a-03186409f699" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.430067] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquired lock "refresh_cache-bd62c682-24f2-4559-887a-03186409f699" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 825.430274] env[68217]: DEBUG nova.network.neutron [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 825.441368] env[68217]: DEBUG nova.compute.provider_tree [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 825.491943] env[68217]: DEBUG nova.network.neutron [-] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.572788] env[68217]: DEBUG nova.compute.manager [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 825.573796] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-585a7bba-8d80-4e03-9a37-e78f9f4afd4b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.585613] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Releasing lock "refresh_cache-aa4b9cc8-d0dc-4a0b-9eec-dceace695df9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 825.585946] env[68217]: DEBUG nova.compute.manager [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 
tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Instance network_info: |[{"id": "04299da0-d810-4014-b79f-1ac8a45e1a8f", "address": "fa:16:3e:6f:9d:be", "network": {"id": "1800eb48-065a-40ff-aa65-26727eaad0cb", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-419697147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9ba7843e6144cd1877b48bc40cd64f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04299da0-d8", "ovs_interfaceid": "04299da0-d810-4014-b79f-1ac8a45e1a8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 825.586490] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:9d:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1323cb03-8367-485a-962e-131af8eba474', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '04299da0-d810-4014-b79f-1ac8a45e1a8f', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 825.594883] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Creating folder: Project (c9ba7843e6144cd1877b48bc40cd64f3). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 825.595224] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a33127de-9345-49fb-9289-c37fa5996eb8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.615935] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Created folder: Project (c9ba7843e6144cd1877b48bc40cd64f3) in parent group-v594094. [ 825.616213] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Creating folder: Instances. Parent ref: group-v594274. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 825.616530] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-358c5c58-9fff-4c94-bf0c-b08f51f7dbf8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.633694] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Created folder: Instances in parent group-v594274. [ 825.633993] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 825.634231] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 825.634457] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b9e1efd-df1e-4ce1-b5e1-8ecfb4bfb025 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.665287] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 825.665287] env[68217]: value = "task-2961249" [ 825.665287] env[68217]: _type = "Task" [ 825.665287] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.677415] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961249, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.738508] env[68217]: DEBUG nova.compute.manager [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 825.739234] env[68217]: DEBUG nova.virt.hardware [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 825.739494] env[68217]: DEBUG nova.virt.hardware [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 825.739678] env[68217]: DEBUG nova.virt.hardware [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 825.739929] env[68217]: DEBUG nova.virt.hardware [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 825.740098] env[68217]: DEBUG nova.virt.hardware [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 825.740248] env[68217]: DEBUG nova.virt.hardware [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 825.740537] env[68217]: DEBUG nova.virt.hardware [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 825.740771] env[68217]: DEBUG nova.virt.hardware [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 825.740966] env[68217]: DEBUG nova.virt.hardware [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Got 1 possible 
topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 825.741166] env[68217]: DEBUG nova.virt.hardware [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 825.741358] env[68217]: DEBUG nova.virt.hardware [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 825.742871] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0c8038-0cf2-43b8-87f6-3e5f6365ab35 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.754438] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e92aca-a7aa-40ec-9499-bcb618c292b1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.781527] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961245, 'name': ReconfigVM_Task, 'duration_secs': 0.294012} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 825.781847] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 2e3dae16-dba3-4230-913d-7a5c3469e36e/2e3dae16-dba3-4230-913d-7a5c3469e36e.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 825.782802] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0b4cf07e-f8a0-4b3c-9054-378a7e9ed812 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 825.795771] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for the task: (returnval){ [ 825.795771] env[68217]: value = "task-2961250" [ 825.795771] env[68217]: _type = "Task" [ 825.795771] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 825.815522] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961250, 'name': Rename_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.845460] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961246, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 825.911705] env[68217]: DEBUG nova.compute.manager [req-a9bfa00a-23c3-4f0c-adc4-2fad123eb9cd req-a99d514d-c2db-4ad5-b78b-8a989674c09f service nova] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Received event network-changed-04299da0-d810-4014-b79f-1ac8a45e1a8f {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 825.911879] env[68217]: DEBUG nova.compute.manager [req-a9bfa00a-23c3-4f0c-adc4-2fad123eb9cd req-a99d514d-c2db-4ad5-b78b-8a989674c09f service nova] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Refreshing instance network info cache due to event network-changed-04299da0-d810-4014-b79f-1ac8a45e1a8f. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 825.912173] env[68217]: DEBUG oslo_concurrency.lockutils [req-a9bfa00a-23c3-4f0c-adc4-2fad123eb9cd req-a99d514d-c2db-4ad5-b78b-8a989674c09f service nova] Acquiring lock "refresh_cache-aa4b9cc8-d0dc-4a0b-9eec-dceace695df9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.912359] env[68217]: DEBUG oslo_concurrency.lockutils [req-a9bfa00a-23c3-4f0c-adc4-2fad123eb9cd req-a99d514d-c2db-4ad5-b78b-8a989674c09f service nova] Acquired lock "refresh_cache-aa4b9cc8-d0dc-4a0b-9eec-dceace695df9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 825.912631] env[68217]: DEBUG nova.network.neutron [req-a9bfa00a-23c3-4f0c-adc4-2fad123eb9cd req-a99d514d-c2db-4ad5-b78b-8a989674c09f service nova] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Refreshing network info cache for port 04299da0-d810-4014-b79f-1ac8a45e1a8f {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 825.939215] env[68217]: DEBUG nova.network.neutron [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Successfully updated port: be6bc9f0-6147-4638-b306-5affbda64885 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 825.947366] env[68217]: DEBUG nova.scheduler.client.report [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 825.989036] env[68217]: DEBUG nova.network.neutron [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 
tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Updating instance_info_cache with network_info: [{"id": "d1428ec3-01c4-4a36-9a5b-dba91c81f279", "address": "fa:16:3e:0c:de:50", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.237", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1428ec3-01", "ovs_interfaceid": "d1428ec3-01c4-4a36-9a5b-dba91c81f279", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 825.997250] env[68217]: INFO nova.compute.manager [-] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Took 1.66 seconds to deallocate network for instance. [ 826.086974] env[68217]: INFO nova.compute.manager [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] instance snapshotting [ 826.089763] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77bd34a2-8d68-481b-9a48-038a87646bdf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.111344] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d9d175f-ba86-463c-9213-55fdd28f226e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.175771] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961249, 'name': CreateVM_Task, 'duration_secs': 0.47694} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.175771] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 826.176380] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.178393] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.178393] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 826.178393] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-346a4f98-cfee-4112-8fbb-c51d427a93de {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.182649] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 826.182649] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]526e093d-efa5-d1d5-3537-04a40d940660" [ 826.182649] env[68217]: _type = "Task" [ 826.182649] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.190681] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526e093d-efa5-d1d5-3537-04a40d940660, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.306565] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961250, 'name': Rename_Task, 'duration_secs': 0.160823} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.306867] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 826.307133] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-09f35ac9-901d-46e8-b0f2-63738a66b44d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.314852] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for the task: (returnval){ [ 826.314852] env[68217]: value = "task-2961251" [ 826.314852] env[68217]: _type = "Task" [ 826.314852] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.322724] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961251, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.347085] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961246, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.514164} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.347407] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 149bd497-4ee6-4ca2-9d18-b276e773aedf/149bd497-4ee6-4ca2-9d18-b276e773aedf.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 826.347676] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 826.347980] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-faf05cea-d9ad-4675-a548-d1a1b313642a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.356498] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 826.356498] env[68217]: value = "task-2961252" [ 826.356498] env[68217]: _type = "Task" [ 826.356498] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.365148] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961252, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.444685] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Acquiring lock "refresh_cache-95e625e9-a726-4c3c-be66-7b8ce93b5f8a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.444929] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Acquired lock "refresh_cache-95e625e9-a726-4c3c-be66-7b8ce93b5f8a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.445045] env[68217]: DEBUG nova.network.neutron [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 826.453801] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.061s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 826.454444] env[68217]: DEBUG nova.compute.manager [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 826.457510] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 20.602s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 826.457723] env[68217]: DEBUG nova.objects.instance [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lazy-loading 'resources' on Instance uuid dfeeed37-8c84-4ecc-87ea-f4239f512fb1 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 826.496601] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Releasing lock "refresh_cache-bd62c682-24f2-4559-887a-03186409f699" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 826.502430] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 826.623051] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 826.623565] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b7da9be3-cc6a-4d55-9b03-e98b51d5a30e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.634728] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 826.634728] env[68217]: value = "task-2961253" [ 826.634728] env[68217]: _type = "Task" [ 826.634728] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.644536] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961253, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.698611] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526e093d-efa5-d1d5-3537-04a40d940660, 'name': SearchDatastore_Task, 'duration_secs': 0.010747} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.698916] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 826.699169] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 826.699401] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.699549] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 826.699756] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 826.700200] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6288890b-6a51-4ca8-92b3-a44b460e6355 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.712598] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 826.712807] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 826.713618] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2b14188a-428f-4c66-917a-d007ac1b26b3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.719451] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 826.719451] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529b0868-4534-b4b1-3a2b-75f60278b7bb" [ 826.719451] env[68217]: _type = "Task" [ 826.719451] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.729628] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529b0868-4534-b4b1-3a2b-75f60278b7bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.742877] env[68217]: DEBUG nova.network.neutron [req-a9bfa00a-23c3-4f0c-adc4-2fad123eb9cd req-a99d514d-c2db-4ad5-b78b-8a989674c09f service nova] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Updated VIF entry in instance network info cache for port 04299da0-d810-4014-b79f-1ac8a45e1a8f. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 826.743243] env[68217]: DEBUG nova.network.neutron [req-a9bfa00a-23c3-4f0c-adc4-2fad123eb9cd req-a99d514d-c2db-4ad5-b78b-8a989674c09f service nova] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Updating instance_info_cache with network_info: [{"id": "04299da0-d810-4014-b79f-1ac8a45e1a8f", "address": "fa:16:3e:6f:9d:be", "network": {"id": "1800eb48-065a-40ff-aa65-26727eaad0cb", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-419697147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9ba7843e6144cd1877b48bc40cd64f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap04299da0-d8", "ovs_interfaceid": "04299da0-d810-4014-b79f-1ac8a45e1a8f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.825732] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961251, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.866674] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961252, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069491} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 826.866951] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 826.867748] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d630e5-1fb1-46bb-aeaf-78c9bd117f95 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.891919] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Reconfiguring VM instance instance-0000003d to attach disk [datastore1] 149bd497-4ee6-4ca2-9d18-b276e773aedf/149bd497-4ee6-4ca2-9d18-b276e773aedf.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 826.892621] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d24d941d-1656-404e-b44f-b08bcbc4cb78 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.914312] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 826.914312] env[68217]: value = "task-2961254" [ 826.914312] env[68217]: _type = "Task" [ 826.914312] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 826.923126] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961254, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 826.960451] env[68217]: DEBUG nova.compute.utils [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 826.964033] env[68217]: DEBUG nova.compute.manager [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 826.964033] env[68217]: DEBUG nova.network.neutron [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 827.014024] env[68217]: DEBUG nova.network.neutron [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 827.084607] env[68217]: DEBUG nova.policy [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '011d38e070744a3fb3c515d5e669ed22', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9ba7843e6144cd1877b48bc40cd64f3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 827.146754] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961253, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.235021] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529b0868-4534-b4b1-3a2b-75f60278b7bb, 'name': SearchDatastore_Task, 'duration_secs': 0.010213} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.235021] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23961465-41ae-451a-9268-d2ecc7abb1f6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.241599] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 827.241599] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52dae40a-78a2-cfee-1335-6a70d74e10d9" [ 827.241599] env[68217]: _type = "Task" [ 827.241599] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.245545] env[68217]: DEBUG oslo_concurrency.lockutils [req-a9bfa00a-23c3-4f0c-adc4-2fad123eb9cd req-a99d514d-c2db-4ad5-b78b-8a989674c09f service nova] Releasing lock "refresh_cache-aa4b9cc8-d0dc-4a0b-9eec-dceace695df9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.245834] env[68217]: DEBUG nova.compute.manager [req-a9bfa00a-23c3-4f0c-adc4-2fad123eb9cd req-a99d514d-c2db-4ad5-b78b-8a989674c09f service nova] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Received event network-vif-deleted-b029c255-6f3f-41b3-ba5c-16ca2a968c6e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 827.246062] env[68217]: DEBUG nova.compute.manager [req-a9bfa00a-23c3-4f0c-adc4-2fad123eb9cd req-a99d514d-c2db-4ad5-b78b-8a989674c09f service nova] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Received event network-vif-plugged-be6bc9f0-6147-4638-b306-5affbda64885 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 827.246406] env[68217]: DEBUG oslo_concurrency.lockutils [req-a9bfa00a-23c3-4f0c-adc4-2fad123eb9cd req-a99d514d-c2db-4ad5-b78b-8a989674c09f service nova] Acquiring lock "95e625e9-a726-4c3c-be66-7b8ce93b5f8a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 827.246687] env[68217]: DEBUG oslo_concurrency.lockutils [req-a9bfa00a-23c3-4f0c-adc4-2fad123eb9cd req-a99d514d-c2db-4ad5-b78b-8a989674c09f service nova] Lock "95e625e9-a726-4c3c-be66-7b8ce93b5f8a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 827.246905] env[68217]: DEBUG oslo_concurrency.lockutils [req-a9bfa00a-23c3-4f0c-adc4-2fad123eb9cd req-a99d514d-c2db-4ad5-b78b-8a989674c09f service nova] Lock "95e625e9-a726-4c3c-be66-7b8ce93b5f8a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 827.247143] env[68217]: DEBUG nova.compute.manager [req-a9bfa00a-23c3-4f0c-adc4-2fad123eb9cd req-a99d514d-c2db-4ad5-b78b-8a989674c09f service nova] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] No waiting events found dispatching network-vif-plugged-be6bc9f0-6147-4638-b306-5affbda64885 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 827.247361] env[68217]: WARNING nova.compute.manager [req-a9bfa00a-23c3-4f0c-adc4-2fad123eb9cd req-a99d514d-c2db-4ad5-b78b-8a989674c09f service nova] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Received unexpected event network-vif-plugged-be6bc9f0-6147-4638-b306-5affbda64885 for instance with vm_state building and task_state spawning. [ 827.259277] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52dae40a-78a2-cfee-1335-6a70d74e10d9, 'name': SearchDatastore_Task, 'duration_secs': 0.01066} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.259588] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.259891] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] aa4b9cc8-d0dc-4a0b-9eec-dceace695df9/aa4b9cc8-d0dc-4a0b-9eec-dceace695df9.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 827.260208] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-10999079-2e66-442c-b267-eacd01636b5e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.268405] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 827.268405] env[68217]: value = "task-2961255" [ 827.268405] env[68217]: _type = "Task" [ 827.268405] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.283204] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961255, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.330126] env[68217]: DEBUG oslo_vmware.api [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961251, 'name': PowerOnVM_Task, 'duration_secs': 0.784867} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.333961] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 827.334164] env[68217]: INFO nova.compute.manager [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Took 11.25 seconds to spawn the instance on the hypervisor. 
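The power-on that completes here follows the same invoke-and-wait pattern visible throughout this log: every vSphere mutation (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task, CreateSnapshot_Task) is issued as a *_Task call, and oslo_vmware.api polls the returned task until it finishes, which is what the repeated "Waiting for the task" / "progress is N%" / "completed successfully" entries above correspond to. The following is a minimal sketch of that pattern using oslo.vmware's public VMwareAPISession, not the actual Nova driver code; the vCenter host, credentials, keyword parameter names, and the way the VM reference is looked up are placeholders/assumptions based on oslo.vmware's documented usage, not values taken from this deployment.

    # Sketch only: illustrates the *_Task invoke-and-wait pattern seen in this log.
    from oslo_vmware import api
    from oslo_vmware import vim_util

    # Placeholder endpoint and credentials; parameter names assumed from the
    # oslo.vmware documentation (host, username, password, retry count, poll interval).
    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    # Fetch one VirtualMachine managed-object reference just so the sketch has
    # something to power on; the real driver already holds the vm_ref it created.
    result = session.invoke_api(vim_util, 'get_objects', session.vim,
                                'VirtualMachine', 1)
    vm_ref = result.objects[0].obj

    # *_Task methods return a task reference immediately; wait_for_task() polls it
    # (the "progress is N%" lines in the log) and raises if the task fails.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    session.wait_for_task(task)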
[ 827.334411] env[68217]: DEBUG nova.compute.manager [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 827.335644] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d77fe8ac-4270-4bdd-8d1a-c6917d5b75ae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.379082] env[68217]: DEBUG nova.network.neutron [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Updating instance_info_cache with network_info: [{"id": "be6bc9f0-6147-4638-b306-5affbda64885", "address": "fa:16:3e:1b:f5:93", "network": {"id": "f4daa25a-4a3f-4e39-bbe4-55ca4773aeab", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-580485113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d5183f68359f454dbd74f1e475288dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe6bc9f0-61", "ovs_interfaceid": "be6bc9f0-6147-4638-b306-5affbda64885", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.432532] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961254, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.437606] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc049f8a-5b6b-411d-a34e-a4b073494f1c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.447657] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fccda0a3-9a0c-4233-88b8-25ee78d9f708 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.486130] env[68217]: DEBUG nova.compute.manager [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 827.493744] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a96db105-828a-4bb3-ab9e-c7a80c6835a7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.506921] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82a2cda7-f63b-4b98-b1e8-cf5a7a01a7bc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.524846] env[68217]: DEBUG nova.compute.provider_tree [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.647752] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961253, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.782601] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961255, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510101} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 827.782974] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] aa4b9cc8-d0dc-4a0b-9eec-dceace695df9/aa4b9cc8-d0dc-4a0b-9eec-dceace695df9.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 827.783260] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 827.783575] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0d225616-ff58-4083-9704-4fdc59bc1330 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.792862] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 827.792862] env[68217]: value = "task-2961256" [ 827.792862] env[68217]: _type = "Task" [ 827.792862] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.800083] env[68217]: DEBUG nova.network.neutron [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Successfully created port: b1fdb191-647f-40ba-a7b4-d58ba0a6a2d4 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 827.809841] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961256, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.856413] env[68217]: INFO nova.compute.manager [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Took 34.71 seconds to build instance. [ 827.883019] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Releasing lock "refresh_cache-95e625e9-a726-4c3c-be66-7b8ce93b5f8a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 827.883366] env[68217]: DEBUG nova.compute.manager [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Instance network_info: |[{"id": "be6bc9f0-6147-4638-b306-5affbda64885", "address": "fa:16:3e:1b:f5:93", "network": {"id": "f4daa25a-4a3f-4e39-bbe4-55ca4773aeab", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-580485113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d5183f68359f454dbd74f1e475288dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe6bc9f0-61", "ovs_interfaceid": "be6bc9f0-6147-4638-b306-5affbda64885", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 827.884062] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1b:f5:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '496ac502-bfc4-4324-8332-cac473eb7cc4', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': 'be6bc9f0-6147-4638-b306-5affbda64885', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 827.891621] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Creating folder: Project (d5183f68359f454dbd74f1e475288dd7). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 827.892189] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8223c2e6-c4ad-41c7-810a-128302c8e4f8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.907586] env[68217]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 827.907731] env[68217]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68217) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 827.908042] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Folder already exists: Project (d5183f68359f454dbd74f1e475288dd7). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 827.908239] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Creating folder: Instances. Parent ref: group-v594235. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 827.908831] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-db567aea-e0f3-4b77-9568-78cc03185f3a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.920658] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Created folder: Instances in parent group-v594235. [ 827.920905] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 827.921513] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 827.921731] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ff0fd9c3-7ae5-419b-9093-a30e9ab8c1b2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.940827] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961254, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 827.947704] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 827.947704] env[68217]: value = "task-2961259" [ 827.947704] env[68217]: _type = "Task" [ 827.947704] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 827.956327] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961259, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.017186] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c5e4413-26bf-4830-8c73-37b3ea1bf1de {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.038948] env[68217]: DEBUG nova.scheduler.client.report [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 828.043444] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Updating instance 'bd62c682-24f2-4559-887a-03186409f699' progress to 0 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 828.147901] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961253, 'name': CreateSnapshot_Task, 'duration_secs': 1.123691} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.148026] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 828.148729] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d005659-26b3-4a13-ab0d-e321efd72727 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.154377] env[68217]: DEBUG nova.compute.manager [req-a2b03239-a131-4824-9d36-2e9854d542c5 req-b9dcb6cf-7fc4-43ab-84ea-f46674af943b service nova] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Received event network-changed-be6bc9f0-6147-4638-b306-5affbda64885 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 828.154515] env[68217]: DEBUG nova.compute.manager [req-a2b03239-a131-4824-9d36-2e9854d542c5 req-b9dcb6cf-7fc4-43ab-84ea-f46674af943b service nova] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Refreshing instance network info cache due to event network-changed-be6bc9f0-6147-4638-b306-5affbda64885. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 828.154761] env[68217]: DEBUG oslo_concurrency.lockutils [req-a2b03239-a131-4824-9d36-2e9854d542c5 req-b9dcb6cf-7fc4-43ab-84ea-f46674af943b service nova] Acquiring lock "refresh_cache-95e625e9-a726-4c3c-be66-7b8ce93b5f8a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.154921] env[68217]: DEBUG oslo_concurrency.lockutils [req-a2b03239-a131-4824-9d36-2e9854d542c5 req-b9dcb6cf-7fc4-43ab-84ea-f46674af943b service nova] Acquired lock "refresh_cache-95e625e9-a726-4c3c-be66-7b8ce93b5f8a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 828.155060] env[68217]: DEBUG nova.network.neutron [req-a2b03239-a131-4824-9d36-2e9854d542c5 req-b9dcb6cf-7fc4-43ab-84ea-f46674af943b service nova] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Refreshing network info cache for port be6bc9f0-6147-4638-b306-5affbda64885 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 828.303899] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961256, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072226} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.304190] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 828.304985] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa38500-6747-457b-80ef-90bf93f72d86 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.329932] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] aa4b9cc8-d0dc-4a0b-9eec-dceace695df9/aa4b9cc8-d0dc-4a0b-9eec-dceace695df9.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 828.330612] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b64a051c-65e6-40f9-ade8-da60d23b109f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.355796] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 828.355796] env[68217]: value = "task-2961260" [ 828.355796] env[68217]: _type = "Task" [ 828.355796] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.360386] env[68217]: DEBUG oslo_concurrency.lockutils [None req-eda7b92e-9fd0-4303-a0ae-5135d263a215 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Lock "2e3dae16-dba3-4230-913d-7a5c3469e36e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 58.149s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.367012] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961260, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.427381] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961254, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.457721] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961259, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.494036] env[68217]: INFO nova.compute.manager [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Rescuing [ 828.494310] env[68217]: DEBUG oslo_concurrency.lockutils [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Acquiring lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 828.494464] env[68217]: DEBUG oslo_concurrency.lockutils [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Acquired lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 828.494630] env[68217]: DEBUG nova.network.neutron [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 828.505609] env[68217]: DEBUG nova.compute.manager [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 828.529738] env[68217]: DEBUG nova.virt.hardware [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 828.529979] env[68217]: DEBUG nova.virt.hardware [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 828.530154] env[68217]: DEBUG nova.virt.hardware [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 828.530335] env[68217]: DEBUG nova.virt.hardware [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 828.530574] env[68217]: DEBUG nova.virt.hardware [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 828.530722] env[68217]: DEBUG nova.virt.hardware [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 828.530928] env[68217]: DEBUG nova.virt.hardware [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 828.531098] env[68217]: DEBUG nova.virt.hardware [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 828.531613] env[68217]: DEBUG nova.virt.hardware [None req-32bcae01-d144-436b-84e0-ff0a13934748 
tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 828.531613] env[68217]: DEBUG nova.virt.hardware [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 828.531613] env[68217]: DEBUG nova.virt.hardware [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 828.532482] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c36276dd-9bbd-4cc9-b501-22ea70ef735e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.541518] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60f37df0-1619-48e2-abbb-9fab0217d334 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.547481] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.090s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 828.558653] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 828.558965] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.780s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.560424] env[68217]: INFO nova.compute.claims [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 828.563380] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2d6009cb-6431-401c-846f-cec936b6f5d4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.571803] env[68217]: DEBUG oslo_vmware.api [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 828.571803] env[68217]: value = "task-2961261" [ 828.571803] env[68217]: 
_type = "Task" [ 828.571803] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.581566] env[68217]: DEBUG oslo_vmware.api [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961261, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.598164] env[68217]: INFO nova.scheduler.client.report [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Deleted allocations for instance dfeeed37-8c84-4ecc-87ea-f4239f512fb1 [ 828.671267] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 828.671864] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-893933c8-2fb6-462f-b461-fc7bd4852a0a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.682718] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 828.682718] env[68217]: value = "task-2961262" [ 828.682718] env[68217]: _type = "Task" [ 828.682718] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.696705] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961262, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.850223] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "58c15727-79ae-404f-a054-d71e3be498cc" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 828.850557] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "58c15727-79ae-404f-a054-d71e3be498cc" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 828.850941] env[68217]: INFO nova.compute.manager [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Shelving [ 828.867034] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961260, 'name': ReconfigVM_Task, 'duration_secs': 0.341042} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 828.867743] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Reconfigured VM instance instance-0000003f to attach disk [datastore1] aa4b9cc8-d0dc-4a0b-9eec-dceace695df9/aa4b9cc8-d0dc-4a0b-9eec-dceace695df9.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 828.868658] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3199f914-086e-4f66-aaba-2d392cacb4f7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.878438] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 828.878438] env[68217]: value = "task-2961263" [ 828.878438] env[68217]: _type = "Task" [ 828.878438] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 828.889365] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961263, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.935610] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961254, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 828.965186] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961259, 'name': CreateVM_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.079066] env[68217]: DEBUG nova.network.neutron [req-a2b03239-a131-4824-9d36-2e9854d542c5 req-b9dcb6cf-7fc4-43ab-84ea-f46674af943b service nova] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Updated VIF entry in instance network info cache for port be6bc9f0-6147-4638-b306-5affbda64885. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 829.079452] env[68217]: DEBUG nova.network.neutron [req-a2b03239-a131-4824-9d36-2e9854d542c5 req-b9dcb6cf-7fc4-43ab-84ea-f46674af943b service nova] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Updating instance_info_cache with network_info: [{"id": "be6bc9f0-6147-4638-b306-5affbda64885", "address": "fa:16:3e:1b:f5:93", "network": {"id": "f4daa25a-4a3f-4e39-bbe4-55ca4773aeab", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-580485113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d5183f68359f454dbd74f1e475288dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe6bc9f0-61", "ovs_interfaceid": "be6bc9f0-6147-4638-b306-5affbda64885", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.084475] env[68217]: DEBUG oslo_vmware.api [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961261, 'name': PowerOffVM_Task, 'duration_secs': 0.200489} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.085516] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 829.085716] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Updating instance 'bd62c682-24f2-4559-887a-03186409f699' progress to 17 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 829.104820] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a287769a-9250-495d-95a0-3533254c0b31 tempest-FloatingIPsAssociationTestJSON-864662715 tempest-FloatingIPsAssociationTestJSON-864662715-project-member] Lock "dfeeed37-8c84-4ecc-87ea-f4239f512fb1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 27.391s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 829.198040] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961262, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.315135] env[68217]: DEBUG nova.network.neutron [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Updating instance_info_cache with network_info: [{"id": "6f6f347d-9ee4-4b18-9c77-67f67ee873e6", "address": "fa:16:3e:25:6f:1a", "network": {"id": "2a4968db-54c5-475c-9fc7-56d5f6441110", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-553739001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a6348b1f20794ee9a016d409eee576e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f6f347d-9e", "ovs_interfaceid": "6f6f347d-9ee4-4b18-9c77-67f67ee873e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.390706] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961263, 'name': Rename_Task, 'duration_secs': 0.169986} completed 
successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.390978] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 829.391250] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5e1684cf-0f46-4ce3-b8f5-3286afe9dbf5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.399319] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 829.399319] env[68217]: value = "task-2961264" [ 829.399319] env[68217]: _type = "Task" [ 829.399319] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.410246] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961264, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.426746] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961254, 'name': ReconfigVM_Task, 'duration_secs': 2.050766} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.427048] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Reconfigured VM instance instance-0000003d to attach disk [datastore1] 149bd497-4ee6-4ca2-9d18-b276e773aedf/149bd497-4ee6-4ca2-9d18-b276e773aedf.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 829.427928] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-50f6a357-d747-45c0-bcf9-e94da3dd4477 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.440715] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 829.440715] env[68217]: value = "task-2961265" [ 829.440715] env[68217]: _type = "Task" [ 829.440715] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.451938] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961265, 'name': Rename_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.461432] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961259, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.586430] env[68217]: DEBUG oslo_concurrency.lockutils [req-a2b03239-a131-4824-9d36-2e9854d542c5 req-b9dcb6cf-7fc4-43ab-84ea-f46674af943b service nova] Releasing lock "refresh_cache-95e625e9-a726-4c3c-be66-7b8ce93b5f8a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.593174] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 829.593174] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 829.593174] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 829.593174] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 829.593417] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 829.593417] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 829.593417] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 829.593417] 
env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 829.593595] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 829.594075] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 829.594075] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 829.603818] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-30a93277-0d53-454a-998b-5211d28de919 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.622879] env[68217]: DEBUG oslo_vmware.api [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 829.622879] env[68217]: value = "task-2961266" [ 829.622879] env[68217]: _type = "Task" [ 829.622879] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.639254] env[68217]: DEBUG oslo_vmware.api [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961266, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.672857] env[68217]: DEBUG nova.network.neutron [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Successfully updated port: b1fdb191-647f-40ba-a7b4-d58ba0a6a2d4 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 829.699015] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961262, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.817910] env[68217]: DEBUG oslo_concurrency.lockutils [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Releasing lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 829.862808] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 829.863973] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5e6b43bf-99f9-4115-a04f-fb462f3f8b01 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.878095] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 829.878095] env[68217]: value = "task-2961267" [ 829.878095] env[68217]: _type = "Task" [ 829.878095] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.888409] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961267, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.913249] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961264, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.956139] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961265, 'name': Rename_Task, 'duration_secs': 0.212128} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.963176] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 829.964364] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-68e0cc0f-07b9-48f2-9ba8-4b90e56ae789 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.972471] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961259, 'name': CreateVM_Task, 'duration_secs': 1.564399} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 829.973798] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 829.974156] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 829.974156] env[68217]: value = "task-2961268" [ 829.974156] env[68217]: _type = "Task" [ 829.974156] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 829.974825] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'attachment_id': 'df8b882e-ffb3-4a6d-a189-cfb5d4753e7f', 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594242', 'volume_id': '19dc7d9f-d50d-45f0-8776-4c28a20691ad', 'name': 'volume-19dc7d9f-d50d-45f0-8776-4c28a20691ad', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95e625e9-a726-4c3c-be66-7b8ce93b5f8a', 'attached_at': '', 'detached_at': '', 'volume_id': '19dc7d9f-d50d-45f0-8776-4c28a20691ad', 'serial': '19dc7d9f-d50d-45f0-8776-4c28a20691ad'}, 'guest_format': None, 'delete_on_termination': True, 'boot_index': 0, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=68217) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 829.975089] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Root volume attach. Driver type: vmdk {{(pid=68217) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 829.976082] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eaf98987-a12e-41e4-8aa0-170e362074bf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.992733] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961268, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 829.996785] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9ef30ec-c2f5-45b2-8b78-feacdf672be8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.006205] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d7fa7c4-5b4a-48b3-997f-1ace442647d4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.021528] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-76a6a79d-3717-49bc-bf79-d9ff56ba1166 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.041826] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Waiting for the task: (returnval){ [ 830.041826] env[68217]: value = "task-2961269" [ 830.041826] env[68217]: _type = "Task" [ 830.041826] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.059332] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961269, 'name': RelocateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.099174] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37b3205d-5894-4ab7-8091-9b4e0e00aad9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.108845] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52ae9d8b-84ac-440c-a6c6-58af81cfc6ed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.149332] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83810a7-d727-4c53-a4a2-2ad84f7c565c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.158458] env[68217]: DEBUG oslo_vmware.api [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961266, 'name': ReconfigVM_Task, 'duration_secs': 0.208784} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.160820] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Updating instance 'bd62c682-24f2-4559-887a-03186409f699' progress to 33 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 830.165986] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a798ebf-7582-4d28-b5dd-a5ecb65b8e71 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.184022] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "refresh_cache-ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.184022] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired lock "refresh_cache-ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 830.184022] env[68217]: DEBUG nova.network.neutron [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 830.185522] env[68217]: DEBUG nova.compute.provider_tree [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 830.191211] env[68217]: DEBUG nova.compute.manager [req-2ada6692-0304-497a-8c6e-efa329053b74 req-61e95a10-922f-4b53-96a4-65d75e05d5ed service nova] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Received event network-vif-plugged-b1fdb191-647f-40ba-a7b4-d58ba0a6a2d4 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 830.191211] env[68217]: DEBUG oslo_concurrency.lockutils [req-2ada6692-0304-497a-8c6e-efa329053b74 req-61e95a10-922f-4b53-96a4-65d75e05d5ed service nova] Acquiring lock "ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 830.191211] env[68217]: DEBUG oslo_concurrency.lockutils [req-2ada6692-0304-497a-8c6e-efa329053b74 req-61e95a10-922f-4b53-96a4-65d75e05d5ed service nova] Lock "ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 830.191506] env[68217]: DEBUG oslo_concurrency.lockutils [req-2ada6692-0304-497a-8c6e-efa329053b74 
req-61e95a10-922f-4b53-96a4-65d75e05d5ed service nova] Lock "ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 830.191754] env[68217]: DEBUG nova.compute.manager [req-2ada6692-0304-497a-8c6e-efa329053b74 req-61e95a10-922f-4b53-96a4-65d75e05d5ed service nova] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] No waiting events found dispatching network-vif-plugged-b1fdb191-647f-40ba-a7b4-d58ba0a6a2d4 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 830.192010] env[68217]: WARNING nova.compute.manager [req-2ada6692-0304-497a-8c6e-efa329053b74 req-61e95a10-922f-4b53-96a4-65d75e05d5ed service nova] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Received unexpected event network-vif-plugged-b1fdb191-647f-40ba-a7b4-d58ba0a6a2d4 for instance with vm_state building and task_state spawning. [ 830.192163] env[68217]: DEBUG nova.compute.manager [req-2ada6692-0304-497a-8c6e-efa329053b74 req-61e95a10-922f-4b53-96a4-65d75e05d5ed service nova] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Received event network-changed-b1fdb191-647f-40ba-a7b4-d58ba0a6a2d4 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 830.192379] env[68217]: DEBUG nova.compute.manager [req-2ada6692-0304-497a-8c6e-efa329053b74 req-61e95a10-922f-4b53-96a4-65d75e05d5ed service nova] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Refreshing instance network info cache due to event network-changed-b1fdb191-647f-40ba-a7b4-d58ba0a6a2d4. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 830.192654] env[68217]: DEBUG oslo_concurrency.lockutils [req-2ada6692-0304-497a-8c6e-efa329053b74 req-61e95a10-922f-4b53-96a4-65d75e05d5ed service nova] Acquiring lock "refresh_cache-ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 830.204862] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961262, 'name': CloneVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.392376] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961267, 'name': PowerOffVM_Task, 'duration_secs': 0.301658} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.392695] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 830.393605] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-150ce4cd-d0c0-4734-a76a-fe61ec5037bc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.419662] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7add6ea1-facb-4180-a9f5-6add8ced4a57 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.427172] env[68217]: DEBUG oslo_vmware.api [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961264, 'name': PowerOnVM_Task, 'duration_secs': 0.561832} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.429489] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 830.429775] env[68217]: INFO nova.compute.manager [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Took 9.08 seconds to spawn the instance on the hypervisor. [ 830.431132] env[68217]: DEBUG nova.compute.manager [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 830.436075] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcb0750a-f72c-4582-90b8-b48c77788fdd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.488244] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961268, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.557129] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961269, 'name': RelocateVM_Task} progress is 40%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.673901] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 830.674289] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 830.674551] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 830.674914] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 830.675112] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 830.675313] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 830.675564] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 830.675832] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 830.676304] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Got 1 possible topologies {{(pid=68217) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 830.676526] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 830.676825] env[68217]: DEBUG nova.virt.hardware [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 830.683905] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Reconfiguring VM instance instance-00000034 to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 830.684302] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0965ea92-f5ba-40ab-b918-a03d04a2ece5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.700820] env[68217]: DEBUG nova.scheduler.client.report [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 830.718566] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961262, 'name': CloneVM_Task, 'duration_secs': 1.601257} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.721592] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Created linked-clone VM from snapshot [ 830.721592] env[68217]: DEBUG oslo_vmware.api [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 830.721592] env[68217]: value = "task-2961270" [ 830.721592] env[68217]: _type = "Task" [ 830.721592] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.724109] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06ea3c05-f4ce-4fea-b43d-bbf963a9e632 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.737425] env[68217]: DEBUG oslo_vmware.api [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961270, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.741908] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Uploading image eb53a15e-0b00-464f-a26f-76295dd750db {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 830.766424] env[68217]: DEBUG nova.network.neutron [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 830.769982] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 830.769982] env[68217]: value = "vm-594280" [ 830.769982] env[68217]: _type = "VirtualMachine" [ 830.769982] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 830.770643] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-55e3214f-a059-4e5e-99a0-44893d22ff7c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.781678] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lease: (returnval){ [ 830.781678] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a753df-0e23-7662-e274-657abfd383c3" [ 830.781678] env[68217]: _type = "HttpNfcLease" [ 830.781678] env[68217]: } obtained for exporting VM: (result){ [ 830.781678] env[68217]: value = "vm-594280" [ 830.781678] env[68217]: _type = "VirtualMachine" [ 830.781678] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 830.781992] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the lease: (returnval){ [ 830.781992] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a753df-0e23-7662-e274-657abfd383c3" [ 830.781992] env[68217]: _type = "HttpNfcLease" [ 830.781992] env[68217]: } to be ready. 
{{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 830.794285] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 830.794285] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a753df-0e23-7662-e274-657abfd383c3" [ 830.794285] env[68217]: _type = "HttpNfcLease" [ 830.794285] env[68217]: } is initializing. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 830.938937] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 830.939821] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-32ab22bf-cd48-4ae6-ba1e-9e62ce0ec7e5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 830.961042] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 830.961042] env[68217]: value = "task-2961272" [ 830.961042] env[68217]: _type = "Task" [ 830.961042] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 830.963587] env[68217]: INFO nova.compute.manager [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Took 29.76 seconds to build instance. [ 830.972700] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961272, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 830.989229] env[68217]: DEBUG oslo_vmware.api [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961268, 'name': PowerOnVM_Task, 'duration_secs': 0.731797} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 830.990439] env[68217]: DEBUG nova.network.neutron [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Updating instance_info_cache with network_info: [{"id": "b1fdb191-647f-40ba-a7b4-d58ba0a6a2d4", "address": "fa:16:3e:71:38:42", "network": {"id": "1800eb48-065a-40ff-aa65-26727eaad0cb", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-419697147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9ba7843e6144cd1877b48bc40cd64f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1fdb191-64", "ovs_interfaceid": "b1fdb191-647f-40ba-a7b4-d58ba0a6a2d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 830.991750] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 830.992396] env[68217]: INFO nova.compute.manager [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Took 12.28 seconds to spawn the instance on the hypervisor. [ 830.992396] env[68217]: DEBUG nova.compute.manager [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 830.993444] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b45b56b-c7ae-4802-a04c-857d0662218c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.058582] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961269, 'name': RelocateVM_Task} progress is 51%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.208431] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.649s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.208959] env[68217]: DEBUG nova.compute.manager [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 831.211986] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 14.442s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 831.241538] env[68217]: DEBUG oslo_vmware.api [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961270, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.292040] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 831.292040] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a753df-0e23-7662-e274-657abfd383c3" [ 831.292040] env[68217]: _type = "HttpNfcLease" [ 831.292040] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 831.292490] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 831.292490] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a753df-0e23-7662-e274-657abfd383c3" [ 831.292490] env[68217]: _type = "HttpNfcLease" [ 831.292490] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 831.293358] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-524dedbc-6bff-4827-9fe2-cf8fdf3180fb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.302727] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e5d22f-cbf0-d6ad-bdca-b161e6232165/disk-0.vmdk from lease info. 
{{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 831.302958] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e5d22f-cbf0-d6ad-bdca-b161e6232165/disk-0.vmdk for reading. {{(pid=68217) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 831.366970] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 831.367745] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d78f96c0-0b7a-4190-9137-6c7d67db7fae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.378705] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for the task: (returnval){ [ 831.378705] env[68217]: value = "task-2961273" [ 831.378705] env[68217]: _type = "Task" [ 831.378705] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.389481] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961273, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.423776] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b30604c8-f4bf-4557-bc51-5113efe048e3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.466776] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c692a654-289c-4160-9d94-475da16c6b82 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "aa4b9cc8-d0dc-4a0b-9eec-dceace695df9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 40.400s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 831.471991] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961272, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.494162] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Releasing lock "refresh_cache-ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 831.494593] env[68217]: DEBUG nova.compute.manager [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Instance network_info: |[{"id": "b1fdb191-647f-40ba-a7b4-d58ba0a6a2d4", "address": "fa:16:3e:71:38:42", "network": {"id": "1800eb48-065a-40ff-aa65-26727eaad0cb", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-419697147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9ba7843e6144cd1877b48bc40cd64f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1fdb191-64", "ovs_interfaceid": "b1fdb191-647f-40ba-a7b4-d58ba0a6a2d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 831.495045] env[68217]: DEBUG oslo_concurrency.lockutils [req-2ada6692-0304-497a-8c6e-efa329053b74 req-61e95a10-922f-4b53-96a4-65d75e05d5ed service nova] Acquired lock "refresh_cache-ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 831.495336] env[68217]: DEBUG nova.network.neutron [req-2ada6692-0304-497a-8c6e-efa329053b74 req-61e95a10-922f-4b53-96a4-65d75e05d5ed service nova] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Refreshing network info cache for port b1fdb191-647f-40ba-a7b4-d58ba0a6a2d4 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 831.496819] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:38:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1323cb03-8367-485a-962e-131af8eba474', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b1fdb191-647f-40ba-a7b4-d58ba0a6a2d4', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 831.506559] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 
tempest-ServersAdminTestJSON-673344979-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 831.514226] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 831.518079] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5973c553-a5ed-417f-9a31-e91440243192 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.533959] env[68217]: INFO nova.compute.manager [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Took 33.45 seconds to build instance. [ 831.544136] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 831.544136] env[68217]: value = "task-2961274" [ 831.544136] env[68217]: _type = "Task" [ 831.544136] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.562073] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961274, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.563046] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961269, 'name': RelocateVM_Task} progress is 65%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.717138] env[68217]: DEBUG nova.compute.utils [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 831.739154] env[68217]: DEBUG nova.compute.manager [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 831.739349] env[68217]: DEBUG nova.network.neutron [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 831.753439] env[68217]: DEBUG oslo_vmware.api [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961270, 'name': ReconfigVM_Task, 'duration_secs': 0.734729} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.758091] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Reconfigured VM instance instance-00000034 to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 831.758091] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e52748-facc-4979-921c-30aad5454d23 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.786935] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Reconfiguring VM instance instance-00000034 to attach disk [datastore2] bd62c682-24f2-4559-887a-03186409f699/bd62c682-24f2-4559-887a-03186409f699.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 831.787638] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f188120-939f-419b-a5ee-6cb4a29b2b10 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.809562] env[68217]: DEBUG oslo_vmware.api [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 831.809562] env[68217]: value = "task-2961275" [ 831.809562] env[68217]: _type = "Task" [ 831.809562] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 831.823901] env[68217]: DEBUG oslo_vmware.api [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961275, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 831.892515] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961273, 'name': PowerOffVM_Task, 'duration_secs': 0.354234} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 831.894983] env[68217]: DEBUG nova.policy [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85faaafec6b64b64a4173f056aa7ac09', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bde4bb32b82948dd991d1fb8890c991b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 831.897462] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 831.898661] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ce389a-c71e-48e0-92ec-774976fd969b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.928192] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a824c3fd-0813-4627-b3de-108bae3d4b6b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.972715] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961272, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.036758] env[68217]: DEBUG oslo_concurrency.lockutils [None req-948c0108-7e52-46d2-a67d-a8ffeb963fc6 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "149bd497-4ee6-4ca2-9d18-b276e773aedf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 50.303s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 832.063359] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961274, 'name': CreateVM_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.067277] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961269, 'name': RelocateVM_Task} progress is 78%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.102722] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 832.103217] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d4c96f68-27b2-413c-ba52-097055c367a5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.114967] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for the task: (returnval){ [ 832.114967] env[68217]: value = "task-2961276" [ 832.114967] env[68217]: _type = "Task" [ 832.114967] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.132529] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] VM already powered off {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 832.132529] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 832.132529] env[68217]: DEBUG oslo_concurrency.lockutils [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.132529] env[68217]: DEBUG oslo_concurrency.lockutils [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 832.132898] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 832.135063] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a4ddd5be-dad0-46bc-9926-63427b9d7084 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.145631] 
env[68217]: DEBUG nova.network.neutron [req-2ada6692-0304-497a-8c6e-efa329053b74 req-61e95a10-922f-4b53-96a4-65d75e05d5ed service nova] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Updated VIF entry in instance network info cache for port b1fdb191-647f-40ba-a7b4-d58ba0a6a2d4. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 832.145741] env[68217]: DEBUG nova.network.neutron [req-2ada6692-0304-497a-8c6e-efa329053b74 req-61e95a10-922f-4b53-96a4-65d75e05d5ed service nova] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Updating instance_info_cache with network_info: [{"id": "b1fdb191-647f-40ba-a7b4-d58ba0a6a2d4", "address": "fa:16:3e:71:38:42", "network": {"id": "1800eb48-065a-40ff-aa65-26727eaad0cb", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-419697147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9ba7843e6144cd1877b48bc40cd64f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb1fdb191-64", "ovs_interfaceid": "b1fdb191-647f-40ba-a7b4-d58ba0a6a2d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.150278] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 832.150401] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 832.152358] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d075881-8e57-4527-ad88-effada221bb1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.163315] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for the task: (returnval){ [ 832.163315] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5212bb87-b9f0-75f7-068b-b061af188076" [ 832.163315] env[68217]: _type = "Task" [ 832.163315] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.175218] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5212bb87-b9f0-75f7-068b-b061af188076, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.240306] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Applying migration context for instance bd62c682-24f2-4559-887a-03186409f699 as it has an incoming, in-progress migration e43f38fa-6f4b-4977-b7ad-6a2e6c8e9fb3. Migration status is migrating {{(pid=68217) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 832.243743] env[68217]: INFO nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: bd62c682-24f2-4559-887a-03186409f699] Updating resource usage from migration e43f38fa-6f4b-4977-b7ad-6a2e6c8e9fb3 [ 832.247693] env[68217]: DEBUG nova.compute.manager [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 832.284701] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 7056fb29-2a2f-4275-a411-4d5f3fcb421f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.285524] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 58c15727-79ae-404f-a054-d71e3be498cc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.286099] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance b5e15801-301a-4ee6-87d2-bbf749967631 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 832.286249] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance da1524a7-2756-4429-ada2-b1f493544bd2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.286370] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 580e6909-7d05-447a-a378-f0b8b71f059a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.286488] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance d0d8ed27-003e-43e2-8a07-041420a2c758 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.286603] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 0552d616-a406-4dfa-8a70-82f39fb98bbc actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.286719] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance d14026b1-84dd-430e-be94-94dcb1f47473 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.286826] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 7584180b-efa6-4038-9f3a-619ab7937553 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.286935] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.287059] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.287172] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance e550084b-84dd-4ae8-8667-2edb45b49e2b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.287636] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 41d279f2-477b-44b2-9eb9-7b782c9c890f is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 832.287965] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 7a01c2c5-3108-4382-85c5-a5ea5e6e160c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 832.288209] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance ba39e563-3e3a-40aa-815f-760f0f37a55d is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 832.288383] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance e642c93b-ca48-4d23-9abb-ff243855d8d0 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 832.288588] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance a86015ea-fa6b-4cf8-9d79-273ffa02ec23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.288760] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 23366029-e754-49dc-ba56-7a0d92232d81 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.289013] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance b7fe971e-353f-427c-896c-32f9de0d70e7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.289149] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 2e3dae16-dba3-4230-913d-7a5c3469e36e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.289281] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 149bd497-4ee6-4ca2-9d18-b276e773aedf actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.289395] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 95e625e9-a726-4c3c-be66-7b8ce93b5f8a actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.289544] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance aa4b9cc8-d0dc-4a0b-9eec-dceace695df9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.289613] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.289726] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Migration e43f38fa-6f4b-4977-b7ad-6a2e6c8e9fb3 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 832.289836] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance bd62c682-24f2-4559-887a-03186409f699 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.289943] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 09290e60-7751-408e-9d6d-20e7cb61767b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 832.323754] env[68217]: DEBUG oslo_vmware.api [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961275, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.475069] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961272, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.532685] env[68217]: DEBUG nova.network.neutron [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Successfully created port: 1eb632e3-fe01-4d72-a4ea-834af75497ef {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 832.568658] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961274, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.569967] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961269, 'name': RelocateVM_Task} progress is 92%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.651158] env[68217]: DEBUG oslo_concurrency.lockutils [req-2ada6692-0304-497a-8c6e-efa329053b74 req-61e95a10-922f-4b53-96a4-65d75e05d5ed service nova] Releasing lock "refresh_cache-ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 832.699058] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5212bb87-b9f0-75f7-068b-b061af188076, 'name': SearchDatastore_Task, 'duration_secs': 0.019699} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.699446] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da740ddf-6038-4517-baf8-018296d72094 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.708862] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for the task: (returnval){ [ 832.708862] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5220a12f-429b-1ed6-9681-bfcc498ce11a" [ 832.708862] env[68217]: _type = "Task" [ 832.708862] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 832.722119] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5220a12f-429b-1ed6-9681-bfcc498ce11a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 832.794811] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance d3468ec2-6548-400a-b247-a6ab1156cab5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 832.825250] env[68217]: DEBUG oslo_vmware.api [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961275, 'name': ReconfigVM_Task, 'duration_secs': 1.008607} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.825899] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Reconfigured VM instance instance-00000034 to attach disk [datastore2] bd62c682-24f2-4559-887a-03186409f699/bd62c682-24f2-4559-887a-03186409f699.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 832.826306] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Updating instance 'bd62c682-24f2-4559-887a-03186409f699' progress to 50 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 832.976535] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961272, 'name': CreateSnapshot_Task, 'duration_secs': 1.919824} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 832.976829] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 832.978077] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b91f7f-ffee-49e5-ac43-64add8d27b0c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.062130] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961274, 'name': CreateVM_Task, 'duration_secs': 1.226919} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.062947] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 833.063949] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.064164] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.064803] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 833.069081] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1e12f0db-9101-46cf-b306-c84264cfe3f6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.071174] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961269, 'name': RelocateVM_Task} progress is 97%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.079042] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 833.079042] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52529602-9fec-7dca-7524-a3a3704cfebd" [ 833.079042] env[68217]: _type = "Task" [ 833.079042] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.085725] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52529602-9fec-7dca-7524-a3a3704cfebd, 'name': SearchDatastore_Task} progress is 0%. 
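The Acquiring/Acquired/Releasing lines around the devstack-image-cache_base entry show per-image serialization through oslo.concurrency named locks (the extra "Acquired external semaphore" line is the file-based variant). A minimal sketch of the same idea, assuming oslo_concurrency is installed; the lock name and the do_work callable are illustrative.

    from oslo_concurrency import lockutils

    def process_cached_image(image_id, do_work):
        # One named lock per cached image, mirroring the Acquiring/Acquired/
        # Releasing lines above; external=True would add the file-based
        # semaphore that shows up as "Acquired external semaphore".
        with lockutils.lock("image-cache-%s" % image_id, external=False):
            return do_work()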
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.144867] env[68217]: DEBUG nova.compute.manager [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 833.145831] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde8c330-3b62-4c00-84b2-ec0b9ce622dc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.220361] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5220a12f-429b-1ed6-9681-bfcc498ce11a, 'name': SearchDatastore_Task, 'duration_secs': 0.018842} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.220789] env[68217]: DEBUG oslo_concurrency.lockutils [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.221196] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 2e3dae16-dba3-4230-913d-7a5c3469e36e/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk. {{(pid=68217) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 833.221595] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-37346f6e-6f88-4a84-93cf-fb225bbc9bc1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.230931] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for the task: (returnval){ [ 833.230931] env[68217]: value = "task-2961277" [ 833.230931] env[68217]: _type = "Task" [ 833.230931] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.241686] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961277, 'name': CopyVirtualDisk_Task} progress is 0%. 
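The CopyVirtualDisk_Task above copies the cached image VMDK into the instance's folder under an "<image-id>-rescue.vmdk" name before the rescue VM is reconfigured to boot from it. A small sketch of that path construction, inferred from the log lines rather than taken from Nova's code.

    def rescue_disk_paths(datastore, image_id, instance_uuid):
        """Source (cached image) and destination (-rescue) VMDK paths in the
        form the CopyVirtualDisk_Task above operates on."""
        src = "[%s] devstack-image-cache_base/%s/%s.vmdk" % (datastore, image_id, image_id)
        dst = "[%s] %s/%s-rescue.vmdk" % (datastore, instance_uuid, image_id)
        return src, dst

    # e.g. rescue_disk_paths("datastore1",
    #                        "575ba628-84b6-4b0c-98ba-305166627d10",
    #                        "2e3dae16-dba3-4230-913d-7a5c3469e36e")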
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.261480] env[68217]: DEBUG nova.compute.manager [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 833.296663] env[68217]: DEBUG nova.virt.hardware [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 833.296973] env[68217]: DEBUG nova.virt.hardware [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 833.297189] env[68217]: DEBUG nova.virt.hardware [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 833.297442] env[68217]: DEBUG nova.virt.hardware [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 833.297614] env[68217]: DEBUG nova.virt.hardware [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 833.297798] env[68217]: DEBUG nova.virt.hardware [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 833.298100] env[68217]: DEBUG nova.virt.hardware [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 833.298295] 
env[68217]: DEBUG nova.virt.hardware [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 833.298505] env[68217]: DEBUG nova.virt.hardware [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 833.298742] env[68217]: DEBUG nova.virt.hardware [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 833.298948] env[68217]: DEBUG nova.virt.hardware [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 833.299806] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 2612f6fc-a43f-4011-8a09-51088a49371a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 833.300557] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Total usable vcpus: 48, total allocated vcpus: 22 {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 833.300786] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=4800MB phys_disk=200GB used_disk=21GB total_vcpus=48 used_vcpus=22 pci_stats=[] {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 833.304400] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f81747-f68a-474c-9063-bc5bd7439ae5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.315177] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18f7a97-62e2-46ae-8a94-f3ed63b40667 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.339802] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e58007c-c4ce-463c-b40d-3288a64f1ff1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.364181] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d0cf4d-eb8b-4834-b6fb-f4024e12e897 {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.407167] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Updating instance 'bd62c682-24f2-4559-887a-03186409f699' progress to 67 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 833.501537] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 833.502291] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-63780e00-9b29-4b3f-950b-eef163ea47bd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.516924] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 833.516924] env[68217]: value = "task-2961278" [ 833.516924] env[68217]: _type = "Task" [ 833.516924] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.530441] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961278, 'name': CloneVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.566892] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961269, 'name': RelocateVM_Task} progress is 98%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.595825] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52529602-9fec-7dca-7524-a3a3704cfebd, 'name': SearchDatastore_Task, 'duration_secs': 0.020513} completed successfully. 
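The nova.virt.hardware topology lines a few records above (flavor/image limits of 0 meaning "no preference", exactly one possible topology for one vCPU) boil down to enumerating (sockets, cores, threads) factorizations of the vCPU count under per-dimension maxima, with 65536 standing in for "unlimited". An illustrative re-derivation, not Nova's actual implementation.

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) triples whose product equals
        vcpus, capped by the per-dimension maxima."""
        topos = []
        for sockets, cores, threads in product(range(1, min(vcpus, max_sockets) + 1),
                                               range(1, min(vcpus, max_cores) + 1),
                                               range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                topos.append((sockets, cores, threads))
        return topos

    print(possible_topologies(1))   # [(1, 1, 1)] -> matches "Got 1 possible topologies"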
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 833.596550] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 833.596820] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 833.597114] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.597303] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 833.597502] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 833.597844] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c173a905-b0e3-48ff-9ced-6637ae5e2e8e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.620882] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 833.621134] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Folder [datastore2] devstack-image-cache_base created. 
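The "Creating directory ... / Created directory ... / Folder ... created" sequence above is an idempotent "make the image-cache folder if it is missing" step: the create call is attempted and an already-exists outcome is treated as success. A sketch of that shape; the make_directory callable and the exception class are stand-ins, not vSphere API names.

    class FolderAlreadyExistsError(Exception):
        """Stand-in for the fault raised when the folder already exists."""

    def ensure_cache_folder(make_directory, path="[datastore2] devstack-image-cache_base"):
        """Idempotent folder creation: attempt the create and tolerate the
        folder already being there, so the step can run on every spawn."""
        try:
            make_directory(path, create_parents=True)
        except FolderAlreadyExistsError:
            pass  # another request created it first; that is fine
        return path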
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 833.624908] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f36362c-8e84-4784-a3cb-b0e7a5d26507 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.637227] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 833.637227] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5201c29e-26ab-50f3-7ac9-e25cd4008b8f" [ 833.637227] env[68217]: _type = "Task" [ 833.637227] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 833.654822] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5201c29e-26ab-50f3-7ac9-e25cd4008b8f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.660428] env[68217]: INFO nova.compute.manager [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] instance snapshotting [ 833.664121] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d2e20e5-028b-4cf4-a397-58c04a4488c0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.695387] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce1c987d-98aa-4e5c-a6ab-c41da53fcb17 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.744142] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961277, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 833.875113] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b0c8a0-48c7-4791-94ac-b6853c615dfd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.885781] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5155b02-15b6-40df-a129-c21bbcb204d9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.931727] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf2f4b2d-55e8-40a0-bb05-365b8cefde47 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.941054] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e0e108-7172-4dbf-ad56-0a2fe1c82851 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.957433] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.970038] env[68217]: DEBUG nova.network.neutron [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Port d1428ec3-01c4-4a36-9a5b-dba91c81f279 binding to destination host cpu-1 is already ACTIVE {{(pid=68217) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 834.031743] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961278, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.065388] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961269, 'name': RelocateVM_Task, 'duration_secs': 3.752312} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.065734] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Volume attach. 
Driver type: vmdk {{(pid=68217) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 834.065858] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594242', 'volume_id': '19dc7d9f-d50d-45f0-8776-4c28a20691ad', 'name': 'volume-19dc7d9f-d50d-45f0-8776-4c28a20691ad', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95e625e9-a726-4c3c-be66-7b8ce93b5f8a', 'attached_at': '', 'detached_at': '', 'volume_id': '19dc7d9f-d50d-45f0-8776-4c28a20691ad', 'serial': '19dc7d9f-d50d-45f0-8776-4c28a20691ad'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 834.066986] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33472898-ddc6-4171-a753-c10d249680cc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.084912] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d181be-f384-4329-8a67-003d75f225b1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.109547] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Reconfiguring VM instance instance-0000003e to attach disk [datastore1] volume-19dc7d9f-d50d-45f0-8776-4c28a20691ad/volume-19dc7d9f-d50d-45f0-8776-4c28a20691ad.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 834.109741] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-eeb3c65b-7032-4693-b877-6e9c2498dbd1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.133359] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Waiting for the task: (returnval){ [ 834.133359] env[68217]: value = "task-2961279" [ 834.133359] env[68217]: _type = "Task" [ 834.133359] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.147020] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961279, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.151611] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5201c29e-26ab-50f3-7ac9-e25cd4008b8f, 'name': SearchDatastore_Task, 'duration_secs': 0.019282} completed successfully. 
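The _attach_volume_vmdk record above carries the connection_info for a vmdk-typed volume; as read from that dict, 'volume' names the managed object holding the disk (vm-594242) and 'volume_id'/'name' identify the Cinder volume. A sketch of pulling out the fields an attach would need; key names follow the log line, the helper itself is hypothetical.

    def summarize_vmdk_connection(connection_info):
        """Extract the attach-relevant fields from a vmdk connection_info
        dict shaped like the one logged above."""
        if connection_info['driver_volume_type'] != 'vmdk':
            raise ValueError("not a vmdk-backed volume")
        data = connection_info['data']
        return {
            'backing_moref': data['volume'],           # e.g. 'vm-594242'
            'volume_id': data['volume_id'],
            'vmdk_name': data['name'],
            'read_only': data.get('access_mode') == 'ro',
        }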
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.152604] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bf929d1-8916-42cd-b903-0b8026f6606d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.160807] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 834.160807] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529407cd-8f93-291a-bc1e-6c16794503d6" [ 834.160807] env[68217]: _type = "Task" [ 834.160807] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.171424] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529407cd-8f93-291a-bc1e-6c16794503d6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.210695] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 834.211320] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-100149a5-1f35-4c58-ac41-0c51aa7a17d7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.222608] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 834.222608] env[68217]: value = "task-2961280" [ 834.222608] env[68217]: _type = "Task" [ 834.222608] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.232371] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961280, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.242457] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961277, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.72777} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.242725] env[68217]: INFO nova.virt.vmwareapi.ds_util [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 2e3dae16-dba3-4230-913d-7a5c3469e36e/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk. [ 834.243533] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b5d525f-7c02-46cf-8860-15454f2c3c68 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.270331] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Reconfiguring VM instance instance-0000003c to attach disk [datastore1] 2e3dae16-dba3-4230-913d-7a5c3469e36e/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 834.270628] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b36affd2-1ce0-470a-9106-2cd9af65f10f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.289451] env[68217]: DEBUG nova.compute.manager [req-65e9655d-4f78-4003-a363-2ce3a7bc12dd req-3f4bce21-0096-46ab-93e1-ac7b3872db24 service nova] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Received event network-vif-plugged-1eb632e3-fe01-4d72-a4ea-834af75497ef {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 834.289649] env[68217]: DEBUG oslo_concurrency.lockutils [req-65e9655d-4f78-4003-a363-2ce3a7bc12dd req-3f4bce21-0096-46ab-93e1-ac7b3872db24 service nova] Acquiring lock "09290e60-7751-408e-9d6d-20e7cb61767b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.289861] env[68217]: DEBUG oslo_concurrency.lockutils [req-65e9655d-4f78-4003-a363-2ce3a7bc12dd req-3f4bce21-0096-46ab-93e1-ac7b3872db24 service nova] Lock "09290e60-7751-408e-9d6d-20e7cb61767b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.290131] env[68217]: DEBUG oslo_concurrency.lockutils [req-65e9655d-4f78-4003-a363-2ce3a7bc12dd req-3f4bce21-0096-46ab-93e1-ac7b3872db24 service nova] Lock "09290e60-7751-408e-9d6d-20e7cb61767b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.290328] env[68217]: DEBUG nova.compute.manager [req-65e9655d-4f78-4003-a363-2ce3a7bc12dd req-3f4bce21-0096-46ab-93e1-ac7b3872db24 service nova] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] No waiting events found dispatching network-vif-plugged-1eb632e3-fe01-4d72-a4ea-834af75497ef {{(pid=68217) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 834.290577] env[68217]: WARNING nova.compute.manager [req-65e9655d-4f78-4003-a363-2ce3a7bc12dd req-3f4bce21-0096-46ab-93e1-ac7b3872db24 service nova] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Received unexpected event network-vif-plugged-1eb632e3-fe01-4d72-a4ea-834af75497ef for instance with vm_state building and task_state spawning. [ 834.292933] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for the task: (returnval){ [ 834.292933] env[68217]: value = "task-2961281" [ 834.292933] env[68217]: _type = "Task" [ 834.292933] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.304460] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961281, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.461324] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 834.533125] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961278, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.556856] env[68217]: DEBUG nova.network.neutron [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Successfully updated port: 1eb632e3-fe01-4d72-a4ea-834af75497ef {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 834.646534] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961279, 'name': ReconfigVM_Task, 'duration_secs': 0.376922} completed successfully. 
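The inventory record above lists per-resource-class totals, reservations and allocation ratios for provider 42aedcce-ee61-45e1-bf10-c06056d1f548. Placement's schedulable capacity per class is (total - reserved) * allocation_ratio; plugging in the logged numbers as a quick check.

    def effective_capacity(inventory):
        """Schedulable capacity per resource class:
        (total - reserved) * allocation_ratio."""
        return {rc: (inv['total'] - inv['reserved']) * inv['allocation_ratio']
                for rc, inv in inventory.items()}

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}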
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.646902] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Reconfigured VM instance instance-0000003e to attach disk [datastore1] volume-19dc7d9f-d50d-45f0-8776-4c28a20691ad/volume-19dc7d9f-d50d-45f0-8776-4c28a20691ad.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 834.651727] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5ffbb28e-007e-4fc3-9601-90ae4f0afd7e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.673098] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529407cd-8f93-291a-bc1e-6c16794503d6, 'name': SearchDatastore_Task, 'duration_secs': 0.014464} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.675564] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 834.676100] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe/ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 834.676400] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Waiting for the task: (returnval){ [ 834.676400] env[68217]: value = "task-2961282" [ 834.676400] env[68217]: _type = "Task" [ 834.676400] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.676875] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1d7bbc2-4a86-44b6-9c5b-ad1ad26c96fc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.691505] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961282, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.693046] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 834.693046] env[68217]: value = "task-2961283" [ 834.693046] env[68217]: _type = "Task" [ 834.693046] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.702043] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961283, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.734921] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961280, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.804338] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961281, 'name': ReconfigVM_Task, 'duration_secs': 0.466701} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 834.804730] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Reconfigured VM instance instance-0000003c to attach disk [datastore1] 2e3dae16-dba3-4230-913d-7a5c3469e36e/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 834.805741] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6dad4e-459f-4276-9cbe-2a90925994c5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.837948] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a7ef3266-8f7c-487c-aa03-b86d39755b55 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.859395] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for the task: (returnval){ [ 834.859395] env[68217]: value = "task-2961284" [ 834.859395] env[68217]: _type = "Task" [ 834.859395] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 834.880812] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961284, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 834.967023] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68217) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 834.967278] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.755s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.967578] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.182s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.967827] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.970164] env[68217]: DEBUG oslo_concurrency.lockutils [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.487s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.970371] env[68217]: DEBUG oslo_concurrency.lockutils [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 834.972603] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 13.802s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.972801] env[68217]: DEBUG nova.objects.instance [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68217) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 834.975696] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68217) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 834.975893] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Cleaning up deleted instances {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 834.994194] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "bd62c682-24f2-4559-887a-03186409f699-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 834.994474] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "bd62c682-24f2-4559-887a-03186409f699-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 834.996181] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "bd62c682-24f2-4559-887a-03186409f699-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.017213] env[68217]: INFO nova.scheduler.client.report [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Deleted allocations for instance e642c93b-ca48-4d23-9abb-ff243855d8d0 [ 835.018747] env[68217]: INFO nova.scheduler.client.report [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Deleted allocations for instance b5e15801-301a-4ee6-87d2-bbf749967631 [ 835.045715] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961278, 'name': CloneVM_Task} progress is 95%. 
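The lockutils records above account for how long each lock was waited on ("waited 17.182s") and how long it was held ("held 3.755s"), which is how the resource-tracker contention shows up in this log. A stdlib-only sketch of the same bookkeeping around a plain threading.Lock, purely for illustration.

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock: threading.Lock, name: str):
        """Measure wait time (before acquire) and hold time (until release),
        mirroring the 'waited N s' / 'held N s' lines above."""
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print('Lock "%s" acquired :: waited %.3fs' % (name, waited))
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - t1
            print('Lock "%s" released :: held %.3fs' % (name, held))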
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.061201] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "refresh_cache-09290e60-7751-408e-9d6d-20e7cb61767b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.063161] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquired lock "refresh_cache-09290e60-7751-408e-9d6d-20e7cb61767b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 835.063161] env[68217]: DEBUG nova.network.neutron [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 835.191978] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961282, 'name': ReconfigVM_Task, 'duration_secs': 0.226027} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.192599] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594242', 'volume_id': '19dc7d9f-d50d-45f0-8776-4c28a20691ad', 'name': 'volume-19dc7d9f-d50d-45f0-8776-4c28a20691ad', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95e625e9-a726-4c3c-be66-7b8ce93b5f8a', 'attached_at': '', 'detached_at': '', 'volume_id': '19dc7d9f-d50d-45f0-8776-4c28a20691ad', 'serial': '19dc7d9f-d50d-45f0-8776-4c28a20691ad'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 835.193443] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c721a6e7-9538-47e0-8bf0-9fa4012fae0a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.208186] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961283, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.210527] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Waiting for the task: (returnval){ [ 835.210527] env[68217]: value = "task-2961285" [ 835.210527] env[68217]: _type = "Task" [ 835.210527] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.220954] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961285, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.236322] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961280, 'name': CreateSnapshot_Task, 'duration_secs': 0.839746} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.236877] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 835.238177] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e61cb12a-8c0b-4f42-9c70-38442d2a17f3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.373793] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Acquiring lock "62628aed-e2f9-478f-bed7-00757fc3c484" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 835.373793] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Lock "62628aed-e2f9-478f-bed7-00757fc3c484" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.374278] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961284, 'name': ReconfigVM_Task, 'duration_secs': 0.256903} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.375131] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 835.375396] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d124121e-9e35-4b54-a65b-5f8978f71579 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.384567] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for the task: (returnval){ [ 835.384567] env[68217]: value = "task-2961286" [ 835.384567] env[68217]: _type = "Task" [ 835.384567] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.401025] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961286, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.497589] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] There are 39 instances to clean {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 835.497589] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 693d6a74-a671-4d02-8798-cd3975507428] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 835.531422] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961278, 'name': CloneVM_Task, 'duration_secs': 1.835379} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.531752] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Created linked-clone VM from snapshot [ 835.535033] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41663be0-5436-4540-a857-5926ea159316 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.537776] env[68217]: DEBUG oslo_concurrency.lockutils [None req-07fc0567-55ba-4a3d-9faf-25ea89c0a162 tempest-SecurityGroupsTestJSON-1197000054 tempest-SecurityGroupsTestJSON-1197000054-project-member] Lock "b5e15801-301a-4ee6-87d2-bbf749967631" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.049s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.542121] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62f86779-7e15-4a6b-ac33-fa0303959847 tempest-ServerPasswordTestJSON-1693546442 tempest-ServerPasswordTestJSON-1693546442-project-member] Lock "e642c93b-ca48-4d23-9abb-ff243855d8d0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.878s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.549750] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Uploading image 31c3d1c5-dcbd-447b-935c-0ac48e805003 {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 835.583745] env[68217]: DEBUG oslo_vmware.rw_handles [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 835.583745] env[68217]: value = "vm-594283" [ 835.583745] env[68217]: _type = "VirtualMachine" [ 835.583745] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 835.584423] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-0abfca67-3ff6-49e4-b445-8bcc30e461e7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.594457] env[68217]: DEBUG oslo_vmware.rw_handles [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lease: (returnval){ [ 835.594457] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527bdb9a-db80-c269-bc00-7cef8405ef8e" [ 835.594457] env[68217]: _type = "HttpNfcLease" [ 835.594457] env[68217]: } obtained for exporting VM: (result){ [ 835.594457] env[68217]: value = "vm-594283" [ 835.594457] env[68217]: _type = "VirtualMachine" [ 835.594457] env[68217]: }. 
{{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 835.596910] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the lease: (returnval){ [ 835.596910] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527bdb9a-db80-c269-bc00-7cef8405ef8e" [ 835.596910] env[68217]: _type = "HttpNfcLease" [ 835.596910] env[68217]: } to be ready. {{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 835.599661] env[68217]: DEBUG nova.network.neutron [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 835.603713] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 835.603713] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527bdb9a-db80-c269-bc00-7cef8405ef8e" [ 835.603713] env[68217]: _type = "HttpNfcLease" [ 835.603713] env[68217]: } is initializing. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 835.706647] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961283, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.654477} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.706994] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe/ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 835.707302] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 835.707635] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d3fa0caf-b1c4-410c-b99b-39766f2cf4e0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.719039] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 835.719039] env[68217]: value = "task-2961288" [ 835.719039] env[68217]: _type = "Task" [ 835.719039] env[68217]: } to complete. 
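
The CopyVirtualDisk_Task/ExtendVirtualDisk_Task pair above is the usual cached-image spawn path: the template VMDK is copied out of devstack-image-cache_base into the instance directory and the root disk is then grown to the flavor size (1048576 KB here). A rough sketch of those two calls through oslo.vmware, with `session`, `dc_ref` (the datacenter reference) taken as givens and the datastore paths copied from the log; error handling and specs are omitted:

    # Sketch only; `session` and `dc_ref` are assumed to exist.
    disk_mgr = session.vim.service_content.virtualDiskManager
    src = ('[datastore2] devstack-image-cache_base/'
           '575ba628-84b6-4b0c-98ba-305166627d10/'
           '575ba628-84b6-4b0c-98ba-305166627d10.vmdk')
    dst = ('[datastore2] ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe/'
           'ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe.vmdk')

    copy_task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task', disk_mgr,
                                   sourceName=src, sourceDatacenter=dc_ref,
                                   destName=dst, destDatacenter=dc_ref)
    session.wait_for_task(copy_task)

    # Grow the copied root disk to the requested flavor size (in KB).
    extend_task = session.invoke_api(session.vim, 'ExtendVirtualDisk_Task',
                                     disk_mgr, name=dst, datacenter=dc_ref,
                                     newCapacityKb=1048576, eagerZero=False)
    session.wait_for_task(extend_task)
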
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.732357] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961285, 'name': Rename_Task, 'duration_secs': 0.24801} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 835.732357] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 835.732740] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-98aab5cb-5539-422f-8afd-3640c4dc1084 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.741054] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961288, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.744760] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Waiting for the task: (returnval){ [ 835.744760] env[68217]: value = "task-2961289" [ 835.744760] env[68217]: _type = "Task" [ 835.744760] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.762758] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 835.763105] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961289, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.763348] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-578020c8-c8d2-41ab-9d55-a9c61b30ee91 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.772875] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 835.772875] env[68217]: value = "task-2961290" [ 835.772875] env[68217]: _type = "Task" [ 835.772875] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 835.777063] env[68217]: DEBUG nova.network.neutron [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Updating instance_info_cache with network_info: [{"id": "1eb632e3-fe01-4d72-a4ea-834af75497ef", "address": "fa:16:3e:ec:b6:8c", "network": {"id": "9b382d3b-5356-4cee-b6ae-e9a825915fe9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-894310318-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bde4bb32b82948dd991d1fb8890c991b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4162774e-ec80-4d85-aeb4-fae77f197393", "external-id": "nsx-vlan-transportzone-542", "segmentation_id": 542, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1eb632e3-fe", "ovs_interfaceid": "1eb632e3-fe01-4d72-a4ea-834af75497ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.787330] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961290, 'name': CloneVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.875946] env[68217]: DEBUG nova.compute.manager [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 835.899762] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961286, 'name': PowerOnVM_Task} progress is 96%. 
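
The instance_info_cache update above carries the full network_info document for port 1eb632e3-fe01-4d72-a4ea-834af75497ef. Most consumers only need a handful of fields out of it; a small, self-contained helper (illustrative, not Nova's nova.network.model API) that pulls them from an entry shaped like the one logged:

    # A trimmed-down copy of the VIF entry logged above (only the fields the
    # helper touches); real entries carry many more keys.
    network_info = [{
        'id': '1eb632e3-fe01-4d72-a4ea-834af75497ef',
        'address': 'fa:16:3e:ec:b6:8c',
        'devname': 'tap1eb632e3-fe',
        'ovs_interfaceid': '1eb632e3-fe01-4d72-a4ea-834af75497ef',
        'network': {'subnets': [{'cidr': '192.168.128.0/28',
                                 'ips': [{'address': '192.168.128.8'}]}]},
    }]

    def summarize_vif(vif):
        subnet = vif['network']['subnets'][0]
        return {
            'port_id': vif['id'],
            'mac': vif['address'],
            'fixed_ip': subnet['ips'][0]['address'],
            'cidr': subnet['cidr'],
            'devname': vif.get('devname'),
            'ovs_port': vif.get('ovs_interfaceid'),
        }

    print([summarize_vif(vif) for vif in network_info])
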
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 835.987026] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c47d6d7f-c7f6-42b2-9ebc-957c7fd978d8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.987026] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.174s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.987026] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 835.988447] env[68217]: DEBUG oslo_concurrency.lockutils [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.880s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 835.991015] env[68217]: INFO nova.compute.claims [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 836.002709] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 156ea1ad-6e52-4848-915d-7ba74c606e6e] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 836.026525] env[68217]: INFO nova.scheduler.client.report [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleted allocations for instance 7a01c2c5-3108-4382-85c5-a5ea5e6e160c [ 836.079287] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "refresh_cache-bd62c682-24f2-4559-887a-03186409f699" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.079486] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquired lock "refresh_cache-bd62c682-24f2-4559-887a-03186409f699" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.079768] env[68217]: DEBUG nova.network.neutron [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 
tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 836.110651] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 836.110651] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527bdb9a-db80-c269-bc00-7cef8405ef8e" [ 836.110651] env[68217]: _type = "HttpNfcLease" [ 836.110651] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 836.110651] env[68217]: DEBUG oslo_vmware.rw_handles [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 836.110651] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527bdb9a-db80-c269-bc00-7cef8405ef8e" [ 836.110651] env[68217]: _type = "HttpNfcLease" [ 836.110651] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 836.111581] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23005367-b108-4fe9-8022-e5063e7c09bd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.126854] env[68217]: DEBUG oslo_vmware.rw_handles [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524b2e94-7459-0e2d-c87d-3b8c263a3042/disk-0.vmdk from lease info. {{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 836.127097] env[68217]: DEBUG oslo_vmware.rw_handles [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524b2e94-7459-0e2d-c87d-3b8c263a3042/disk-0.vmdk for reading. {{(pid=68217) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 836.232807] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961288, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.132068} completed successfully. 
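
The lease lifecycle above (ExportVm, "Waiting for the lease ... to be ready", "Found VMDK URL ... Opening URL ... for reading") is how oslo.vmware's rw_handles stream a stream-optimized image out of vCenter for upload to Glance. A condensed sketch of the same sequence, assuming `session` is a VMwareAPISession and `vm_ref` the exported VM; the URL-selection logic is simplified compared to rw_handles:

    from oslo_vmware import vim_util

    # Ask vCenter for an export (HttpNfcLease) of the VM and wait until the
    # lease is ready -- the "is initializing" / "is ready" lines above.
    lease = session.invoke_api(session.vim, 'ExportVm', vm_ref)
    session.wait_for_lease_ready(lease)

    # Read the lease info and pick the deviceUrl that points at the VMDK;
    # this is the "Found VMDK URL ... disk-0.vmdk" line. rw_handles then opens
    # that URL over HTTPS and streams the bytes out.
    lease_info = session.invoke_api(vim_util, 'get_object_property',
                                    session.vim, lease, 'info')
    vmdk_url = next(dev.url for dev in lease_info.deviceUrl
                    if dev.url.endswith('.vmdk'))
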
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.233192] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 836.234113] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e030e398-f076-44fe-be91-51fe9aaf189d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.261072] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe/ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 836.267293] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9ca1c32b-aa61-4ea8-a972-035b521460c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.282198] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Releasing lock "refresh_cache-09290e60-7751-408e-9d6d-20e7cb61767b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 836.282588] env[68217]: DEBUG nova.compute.manager [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Instance network_info: |[{"id": "1eb632e3-fe01-4d72-a4ea-834af75497ef", "address": "fa:16:3e:ec:b6:8c", "network": {"id": "9b382d3b-5356-4cee-b6ae-e9a825915fe9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-894310318-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bde4bb32b82948dd991d1fb8890c991b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4162774e-ec80-4d85-aeb4-fae77f197393", "external-id": "nsx-vlan-transportzone-542", "segmentation_id": 542, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1eb632e3-fe", "ovs_interfaceid": "1eb632e3-fe01-4d72-a4ea-834af75497ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 836.282896] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4cd9b145-d0ac-489a-9064-cb342e32be6f 
{{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.288377] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ec:b6:8c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4162774e-ec80-4d85-aeb4-fae77f197393', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1eb632e3-fe01-4d72-a4ea-834af75497ef', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 836.295750] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 836.300473] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 836.301078] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961289, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.304900] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2a1a4dff-c91f-4bdd-8820-0f3878b9874c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.320356] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 836.320356] env[68217]: value = "task-2961291" [ 836.320356] env[68217]: _type = "Task" [ 836.320356] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.325246] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961290, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.333764] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 836.333764] env[68217]: value = "task-2961292" [ 836.333764] env[68217]: _type = "Task" [ 836.333764] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.342738] env[68217]: DEBUG nova.compute.manager [req-d9a823e3-1e5b-4653-9d6e-9ab52610c5ad req-3b577ea5-7748-4feb-bed5-7c8ef6db1ae5 service nova] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Received event network-changed-1eb632e3-fe01-4d72-a4ea-834af75497ef {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 836.342886] env[68217]: DEBUG nova.compute.manager [req-d9a823e3-1e5b-4653-9d6e-9ab52610c5ad req-3b577ea5-7748-4feb-bed5-7c8ef6db1ae5 service nova] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Refreshing instance network info cache due to event network-changed-1eb632e3-fe01-4d72-a4ea-834af75497ef. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 836.343135] env[68217]: DEBUG oslo_concurrency.lockutils [req-d9a823e3-1e5b-4653-9d6e-9ab52610c5ad req-3b577ea5-7748-4feb-bed5-7c8ef6db1ae5 service nova] Acquiring lock "refresh_cache-09290e60-7751-408e-9d6d-20e7cb61767b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.343299] env[68217]: DEBUG oslo_concurrency.lockutils [req-d9a823e3-1e5b-4653-9d6e-9ab52610c5ad req-3b577ea5-7748-4feb-bed5-7c8ef6db1ae5 service nova] Acquired lock "refresh_cache-09290e60-7751-408e-9d6d-20e7cb61767b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.343542] env[68217]: DEBUG nova.network.neutron [req-d9a823e3-1e5b-4653-9d6e-9ab52610c5ad req-3b577ea5-7748-4feb-bed5-7c8ef6db1ae5 service nova] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Refreshing network info cache for port 1eb632e3-fe01-4d72-a4ea-834af75497ef {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 836.348021] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961291, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.356808] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961292, 'name': CreateVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.397958] env[68217]: DEBUG oslo_vmware.api [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961286, 'name': PowerOnVM_Task, 'duration_secs': 0.784065} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.398345] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 836.401557] env[68217]: DEBUG nova.compute.manager [None req-87f3d239-0b52-45f2-8812-33a8cf6a3738 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 836.402534] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f23ed54b-9dd3-4d6b-b8ef-ee5f8f207a96 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.406593] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 836.509259] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 7ec30097-1151-4b0d-8226-e4d34ea7b3c9] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 836.540891] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ea270b6-d018-4e6c-af76-df1e4e88a109 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "7a01c2c5-3108-4382-85c5-a5ea5e6e160c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.043s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 836.771185] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961289, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.796439] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961290, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 836.854319] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961291, 'name': ReconfigVM_Task, 'duration_secs': 0.49325} completed successfully. 
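
The Acquiring/acquired/released lines that bracket "compute_resources", the per-instance UUIDs and the image-cache paths all come from oslo_concurrency.lockutils, which records how long each caller waited for a lock and how long it was held. The shape of the code producing them is roughly the following; the function names are illustrative, not Nova's actual methods:

    from oslo_concurrency import lockutils

    # Decorator form: serializes resource-tracker style critical sections and
    # emits the "acquired ... waited" / "released ... held" DEBUG lines.
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def instance_claim(context, instance, nodename):
        ...  # runs with the lock held

    # Context-manager form, as used around per-instance and cache-entry locks.
    with lockutils.lock('refresh_cache-09290e60-7751-408e-9d6d-20e7cb61767b'):
        ...  # refresh the instance's network info cache
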
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.856203] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Reconfigured VM instance instance-00000040 to attach disk [datastore2] ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe/ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 836.858367] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e66391e9-a77e-4f41-bc5a-96b42a9ac665 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.870265] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961292, 'name': CreateVM_Task, 'duration_secs': 0.435894} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 836.873214] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 836.873214] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 836.873214] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 836.873214] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 836.873214] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abcecbed-ac4f-402a-be29-15505d854305 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.879816] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 836.879816] env[68217]: value = "task-2961293" [ 836.879816] env[68217]: _type = "Task" [ 836.879816] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.881666] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 836.881666] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5278d26c-d177-ab13-6b4f-5448027bc200" [ 836.881666] env[68217]: _type = "Task" [ 836.881666] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 836.898587] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961293, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.014001] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 71243775-e8df-4cc5-85c9-d64a244b4426] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 837.042223] env[68217]: DEBUG nova.network.neutron [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Updating instance_info_cache with network_info: [{"id": "d1428ec3-01c4-4a36-9a5b-dba91c81f279", "address": "fa:16:3e:0c:de:50", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.237", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1428ec3-01", "ovs_interfaceid": "d1428ec3-01c4-4a36-9a5b-dba91c81f279", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.237668] env[68217]: DEBUG nova.network.neutron [req-d9a823e3-1e5b-4653-9d6e-9ab52610c5ad req-3b577ea5-7748-4feb-bed5-7c8ef6db1ae5 service nova] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Updated VIF entry in instance network info cache for port 1eb632e3-fe01-4d72-a4ea-834af75497ef. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 837.238068] env[68217]: DEBUG nova.network.neutron [req-d9a823e3-1e5b-4653-9d6e-9ab52610c5ad req-3b577ea5-7748-4feb-bed5-7c8ef6db1ae5 service nova] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Updating instance_info_cache with network_info: [{"id": "1eb632e3-fe01-4d72-a4ea-834af75497ef", "address": "fa:16:3e:ec:b6:8c", "network": {"id": "9b382d3b-5356-4cee-b6ae-e9a825915fe9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-894310318-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bde4bb32b82948dd991d1fb8890c991b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4162774e-ec80-4d85-aeb4-fae77f197393", "external-id": "nsx-vlan-transportzone-542", "segmentation_id": 542, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1eb632e3-fe", "ovs_interfaceid": "1eb632e3-fe01-4d72-a4ea-834af75497ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.262225] env[68217]: DEBUG oslo_vmware.api [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961289, 'name': PowerOnVM_Task, 'duration_secs': 1.131698} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.266350] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 837.266754] env[68217]: INFO nova.compute.manager [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Took 11.53 seconds to spawn the instance on the hypervisor. [ 837.268056] env[68217]: DEBUG nova.compute.manager [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 837.268619] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc63136b-942d-4a44-83f0-06683dd9a22f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.299430] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961290, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.403998] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5278d26c-d177-ab13-6b4f-5448027bc200, 'name': SearchDatastore_Task, 'duration_secs': 0.032108} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.410847] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 837.411868] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 837.411868] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 837.411868] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 837.412241] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 837.413342] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961293, 'name': Rename_Task, 'duration_secs': 0.203362} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.413587] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b1e1dcb-cf34-4919-b16d-d41057d3be84 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.416029] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 837.416575] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-64436e0d-995a-4318-abe9-3e1b0216977c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.426025] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 837.426025] env[68217]: value = "task-2961294" [ 837.426025] env[68217]: _type = "Task" [ 837.426025] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.436238] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 837.436238] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 837.436897] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe6463ed-b199-4604-86ab-5eabb1e59128 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.450361] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961294, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.456455] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 837.456455] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52665f12-8e28-3500-2d3c-e6fe0116b8c5" [ 837.456455] env[68217]: _type = "Task" [ 837.456455] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.468034] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52665f12-8e28-3500-2d3c-e6fe0116b8c5, 'name': SearchDatastore_Task, 'duration_secs': 0.018012} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.468804] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-16c039a6-aed1-45fa-894c-426f6c29e5dc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.481979] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 837.481979] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c2835d-b000-01b1-d077-ca193fdbe136" [ 837.481979] env[68217]: _type = "Task" [ 837.481979] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 837.494170] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c2835d-b000-01b1-d077-ca193fdbe136, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.521838] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 3d03e0b7-0469-4041-a7d5-7768326eb3b5] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 837.546140] env[68217]: DEBUG oslo_concurrency.lockutils [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Releasing lock "refresh_cache-bd62c682-24f2-4559-887a-03186409f699" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 837.556200] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85629b9-3cb8-4eef-b290-48ba952e289e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.569919] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eedd96a-8334-4e66-a999-fc240d5a89fa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.617862] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6511c9-bbaf-4cd0-b861-3cce62d3edcd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.634022] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45045768-da44-4a5a-acba-861a1e1dfa5a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
837.653995] env[68217]: DEBUG nova.compute.provider_tree [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 837.745769] env[68217]: DEBUG oslo_concurrency.lockutils [req-d9a823e3-1e5b-4653-9d6e-9ab52610c5ad req-3b577ea5-7748-4feb-bed5-7c8ef6db1ae5 service nova] Releasing lock "refresh_cache-09290e60-7751-408e-9d6d-20e7cb61767b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 837.798407] env[68217]: INFO nova.compute.manager [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Took 36.53 seconds to build instance. [ 837.804074] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961290, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.944037] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961294, 'name': PowerOnVM_Task} progress is 87%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 837.994755] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c2835d-b000-01b1-d077-ca193fdbe136, 'name': SearchDatastore_Task, 'duration_secs': 0.016846} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 837.995966] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 837.995966] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 09290e60-7751-408e-9d6d-20e7cb61767b/09290e60-7751-408e-9d6d-20e7cb61767b.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 837.996186] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3ff9e6c-d7d9-4216-ae2b-987e39d7fe22 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.005743] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 838.005743] env[68217]: value = "task-2961295" [ 838.005743] env[68217]: _type = "Task" [ 838.005743] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.023816] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961295, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.024518] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 38787c7d-a9cf-4ce6-a112-c1ec259697ca] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 838.089262] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f0defe-990f-4934-831c-55fe618053af {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.116542] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae5cc975-16db-453f-aae3-15591cd01cf0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.126786] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Updating instance 'bd62c682-24f2-4559-887a-03186409f699' progress to 83 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 838.159230] env[68217]: DEBUG nova.scheduler.client.report [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 838.302444] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f65320a9-3a20-4e70-a1df-e728494c798e tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Lock "95e625e9-a726-4c3c-be66-7b8ce93b5f8a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 49.445s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.303247] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961290, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.444208] env[68217]: DEBUG oslo_vmware.api [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961294, 'name': PowerOnVM_Task, 'duration_secs': 0.756371} completed successfully. 
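
The inventory record reported above fixes the provider's schedulable capacity; Placement treats it as (total - reserved) * allocation_ratio per resource class. Working that through for the values in the log (illustrative arithmetic only):

    # Capacity implied by the logged inventory for provider 42aedcce-....
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)   # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0
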
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 838.444730] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 838.444987] env[68217]: INFO nova.compute.manager [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Took 9.94 seconds to spawn the instance on the hypervisor. [ 838.445676] env[68217]: DEBUG nova.compute.manager [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 838.447246] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a81ddea3-5680-40d2-afec-857963e5b258 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.520743] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961295, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.533120] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 9d2b3670-ef8a-477a-b876-7a8fe37fa065] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 838.639262] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 838.639776] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-30d45e26-80ea-4709-ab2d-3540800abfde {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.650280] env[68217]: DEBUG oslo_vmware.api [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 838.650280] env[68217]: value = "task-2961296" [ 838.650280] env[68217]: _type = "Task" [ 838.650280] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.662883] env[68217]: DEBUG oslo_concurrency.lockutils [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.674s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.663807] env[68217]: DEBUG nova.compute.manager [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 838.668416] env[68217]: DEBUG oslo_vmware.api [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961296, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.668846] env[68217]: DEBUG oslo_concurrency.lockutils [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.184s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.669178] env[68217]: DEBUG oslo_concurrency.lockutils [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 838.672509] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.155s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 838.675206] env[68217]: INFO nova.compute.claims [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 838.719441] env[68217]: INFO nova.scheduler.client.report [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Deleted allocations for instance 41d279f2-477b-44b2-9eb9-7b782c9c890f [ 838.778293] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e5d22f-cbf0-d6ad-bdca-b161e6232165/disk-0.vmdk. 
{{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 838.780140] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a152c7-5497-43ff-bbc4-55499c09363f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.792157] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e5d22f-cbf0-d6ad-bdca-b161e6232165/disk-0.vmdk is in state: ready. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 838.792426] env[68217]: ERROR oslo_vmware.rw_handles [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e5d22f-cbf0-d6ad-bdca-b161e6232165/disk-0.vmdk due to incomplete transfer. [ 838.796079] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-9e2085b8-5a4a-4b38-a155-b4ccea8a8ef9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.806073] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961290, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.808363] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e5d22f-cbf0-d6ad-bdca-b161e6232165/disk-0.vmdk. 
{{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 838.808591] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Uploaded image eb53a15e-0b00-464f-a26f-76295dd750db to the Glance image server {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 838.811994] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 838.812640] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-1481bc89-d307-46b2-9a23-89df1cc9f20c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.820555] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 838.820555] env[68217]: value = "task-2961297" [ 838.820555] env[68217]: _type = "Task" [ 838.820555] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 838.832602] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961297, 'name': Destroy_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 838.968713] env[68217]: INFO nova.compute.manager [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Took 33.47 seconds to build instance. [ 839.021652] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961295, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.62414} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.022044] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 09290e60-7751-408e-9d6d-20e7cb61767b/09290e60-7751-408e-9d6d-20e7cb61767b.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 839.022292] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 839.022664] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7ad036bd-28da-4c99-bacf-52ebf7255530 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.032199] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 839.032199] env[68217]: value = "task-2961298" [ 839.032199] env[68217]: _type = "Task" [ 839.032199] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.036121] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: e089c20e-b788-4e6c-9bd2-9ad485305582] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 839.044472] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961298, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.162920] env[68217]: DEBUG oslo_vmware.api [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961296, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.180651] env[68217]: DEBUG nova.compute.utils [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 839.187919] env[68217]: DEBUG nova.compute.manager [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 839.187919] env[68217]: DEBUG nova.network.neutron [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 839.229694] env[68217]: DEBUG oslo_concurrency.lockutils [None req-30ea8a57-9852-4122-9d5c-56c0e0f88170 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "41d279f2-477b-44b2-9eb9-7b782c9c890f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.141s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.306009] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961290, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.308415] env[68217]: DEBUG nova.policy [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36276be4c67c4abfa0941293d4cc800b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ebfeb38b81794c558c1164cecd7fa221', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 839.336360] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961297, 'name': Destroy_Task, 'duration_secs': 0.393903} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.336893] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Destroyed the VM [ 839.338225] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 839.338534] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6a9ec9b5-43f0-4ca0-9969-148e9193cd27 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.348073] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 839.348073] env[68217]: value = "task-2961299" [ 839.348073] env[68217]: _type = "Task" [ 839.348073] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.360241] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961299, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.470644] env[68217]: DEBUG oslo_concurrency.lockutils [None req-32bcae01-d144-436b-84e0-ff0a13934748 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.922s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 839.543285] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: dc45d268-7a7f-4e65-b6fa-942ddba69b03] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 839.545522] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961298, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081845} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.546201] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 839.547328] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0068468a-b3bd-470b-95ec-064e337bf070 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.579980] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 09290e60-7751-408e-9d6d-20e7cb61767b/09290e60-7751-408e-9d6d-20e7cb61767b.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 839.581135] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2bab2ca-9ca3-49d7-8fec-67e23ddfc681 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.612171] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 839.612171] env[68217]: value = "task-2961300" [ 839.612171] env[68217]: _type = "Task" [ 839.612171] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 839.624982] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961300, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.664961] env[68217]: DEBUG oslo_vmware.api [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961296, 'name': PowerOnVM_Task, 'duration_secs': 0.582398} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 839.665445] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 839.666048] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-40ec5198-d9b0-44f5-9196-537a6c941616 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Updating instance 'bd62c682-24f2-4559-887a-03186409f699' progress to 100 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 839.692942] env[68217]: DEBUG nova.compute.manager [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 839.806527] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961290, 'name': CloneVM_Task} progress is 95%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 839.863831] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961299, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.048852] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: a4a88f10-937a-4fa6-aa15-eb7f669e77d0] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 840.127094] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961300, 'name': ReconfigVM_Task, 'duration_secs': 0.412342} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.127769] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 09290e60-7751-408e-9d6d-20e7cb61767b/09290e60-7751-408e-9d6d-20e7cb61767b.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 840.128597] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-23d5d715-f80e-4b98-8422-5084bf12df21 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.136243] env[68217]: DEBUG nova.network.neutron [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Successfully created port: 2edb7766-2fb2-49a8-8100-5abcb17581eb {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 840.145356] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 840.145356] env[68217]: value = "task-2961301" [ 840.145356] env[68217]: _type = "Task" [ 840.145356] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.157568] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961301, 'name': Rename_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.232738] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c8f14bf-d5f3-479f-a5a5-f091363722e8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.242119] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef75399f-8729-4f67-8767-ae84de507623 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.278494] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-849b4e24-3c15-4b18-bf4c-2af60283f707 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.288264] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9faf12a-416c-4732-bfed-983dcdaa946c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.307090] env[68217]: DEBUG nova.compute.provider_tree [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.318971] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961290, 'name': CloneVM_Task, 'duration_secs': 4.158821} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.319254] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Created linked-clone VM from snapshot [ 840.320963] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73cfc09b-b5ea-467b-9ef9-a3e4a0616393 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.333147] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Uploading image 25135da3-3ff8-44c1-b73f-304cfd777743 {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 840.346053] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 840.346885] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b0cc474e-1b90-4d62-85e6-0e0aaf5934e6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.368720] env[68217]: DEBUG oslo_vmware.api [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961299, 'name': RemoveSnapshot_Task, 'duration_secs': 0.746484} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.369738] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 840.369840] env[68217]: INFO nova.compute.manager [None req-e7e09b79-cf26-4b68-a8cf-1d2a449f8925 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Took 14.28 seconds to snapshot the instance on the hypervisor. [ 840.376026] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 840.376026] env[68217]: value = "task-2961302" [ 840.376026] env[68217]: _type = "Task" [ 840.376026] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.384380] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961302, 'name': Destroy_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.546998] env[68217]: DEBUG nova.compute.manager [req-02aea796-0256-4e03-9538-98765c5582b7 req-85320a01-0631-402a-8658-460593acff1c service nova] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Received event network-changed-be6bc9f0-6147-4638-b306-5affbda64885 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 840.548430] env[68217]: DEBUG nova.compute.manager [req-02aea796-0256-4e03-9538-98765c5582b7 req-85320a01-0631-402a-8658-460593acff1c service nova] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Refreshing instance network info cache due to event network-changed-be6bc9f0-6147-4638-b306-5affbda64885. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 840.548755] env[68217]: DEBUG oslo_concurrency.lockutils [req-02aea796-0256-4e03-9538-98765c5582b7 req-85320a01-0631-402a-8658-460593acff1c service nova] Acquiring lock "refresh_cache-95e625e9-a726-4c3c-be66-7b8ce93b5f8a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.548993] env[68217]: DEBUG oslo_concurrency.lockutils [req-02aea796-0256-4e03-9538-98765c5582b7 req-85320a01-0631-402a-8658-460593acff1c service nova] Acquired lock "refresh_cache-95e625e9-a726-4c3c-be66-7b8ce93b5f8a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 840.549311] env[68217]: DEBUG nova.network.neutron [req-02aea796-0256-4e03-9538-98765c5582b7 req-85320a01-0631-402a-8658-460593acff1c service nova] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Refreshing network info cache for port be6bc9f0-6147-4638-b306-5affbda64885 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 840.553802] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 1675982e-0702-482b-9fe6-fd4eb9d83311] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 840.621885] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "49ec4f5f-043a-4d16-a371-947571c3b090" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 840.622551] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "49ec4f5f-043a-4d16-a371-947571c3b090" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 840.659197] env[68217]: DEBUG oslo_vmware.api [None 
req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961301, 'name': Rename_Task, 'duration_secs': 0.19789} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 840.659536] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 840.659797] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b3b1b598-6959-43e4-8b38-c93fa8a2e14d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.675323] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 840.675323] env[68217]: value = "task-2961303" [ 840.675323] env[68217]: _type = "Task" [ 840.675323] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.692038] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961303, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.706390] env[68217]: DEBUG nova.compute.manager [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 840.747181] env[68217]: DEBUG nova.virt.hardware [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 840.747548] env[68217]: DEBUG nova.virt.hardware [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 840.747826] env[68217]: DEBUG nova.virt.hardware [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 840.747917] env[68217]: DEBUG nova.virt.hardware [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 840.748116] env[68217]: DEBUG nova.virt.hardware [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 840.748364] env[68217]: DEBUG nova.virt.hardware [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 840.748677] env[68217]: DEBUG nova.virt.hardware [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 840.748918] env[68217]: DEBUG nova.virt.hardware [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 840.749133] env[68217]: DEBUG nova.virt.hardware [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] 
Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 840.749249] env[68217]: DEBUG nova.virt.hardware [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 840.749444] env[68217]: DEBUG nova.virt.hardware [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 840.750501] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3da5405-318e-4831-ab5d-ee85d0ddd492 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.765645] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0768dc-6a57-4037-b889-366a451170d9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.815148] env[68217]: DEBUG nova.scheduler.client.report [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 840.888849] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961302, 'name': Destroy_Task} progress is 33%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.056408] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: cf457d43-b939-4284-b84d-9075895e9dda] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 841.128032] env[68217]: DEBUG nova.compute.manager [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 49ec4f5f-043a-4d16-a371-947571c3b090] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 841.187201] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961303, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.320167] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.648s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.320840] env[68217]: DEBUG nova.compute.manager [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 841.324606] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.822s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.324794] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.327689] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.921s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 841.329237] env[68217]: INFO nova.compute.claims [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 841.388236] env[68217]: INFO nova.scheduler.client.report [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleted allocations for instance ba39e563-3e3a-40aa-815f-760f0f37a55d [ 841.399974] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961302, 'name': Destroy_Task, 'duration_secs': 0.618289} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 841.399974] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Destroyed the VM [ 841.399974] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 841.399974] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-75fb971a-34f3-42f3-be25-52dca0cefc29 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.410781] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 841.410781] env[68217]: value = "task-2961304" [ 841.410781] env[68217]: _type = "Task" [ 841.410781] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 841.426872] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961304, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.562417] env[68217]: DEBUG nova.network.neutron [req-02aea796-0256-4e03-9538-98765c5582b7 req-85320a01-0631-402a-8658-460593acff1c service nova] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Updated VIF entry in instance network info cache for port be6bc9f0-6147-4638-b306-5affbda64885. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 841.562417] env[68217]: DEBUG nova.network.neutron [req-02aea796-0256-4e03-9538-98765c5582b7 req-85320a01-0631-402a-8658-460593acff1c service nova] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Updating instance_info_cache with network_info: [{"id": "be6bc9f0-6147-4638-b306-5affbda64885", "address": "fa:16:3e:1b:f5:93", "network": {"id": "f4daa25a-4a3f-4e39-bbe4-55ca4773aeab", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-580485113-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d5183f68359f454dbd74f1e475288dd7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "496ac502-bfc4-4324-8332-cac473eb7cc4", "external-id": "nsx-vlan-transportzone-415", "segmentation_id": 415, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbe6bc9f0-61", "ovs_interfaceid": "be6bc9f0-6147-4638-b306-5affbda64885", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.563250] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 366c780a-2870-4e6e-8cfe-7eec10c363d5] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 841.670688] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 841.688040] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961303, 'name': PowerOnVM_Task} progress is 91%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.838319] env[68217]: DEBUG nova.compute.utils [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 841.839806] env[68217]: DEBUG nova.compute.manager [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 841.839983] env[68217]: DEBUG nova.network.neutron [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 841.899374] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4b0b7ea-8083-41e7-baf3-1facf81f2f29 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "ba39e563-3e3a-40aa-815f-760f0f37a55d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.348s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 841.923156] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961304, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.936089] env[68217]: DEBUG nova.policy [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '752b6d9ab4d64b1390ca8388fb28db15', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad331ad8f44348f6b4c0a6c56977022d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 842.037934] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "09a8469d-567c-4247-96eb-edf0f4040f65" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.038229] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "09a8469d-567c-4247-96eb-edf0f4040f65" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.073209] env[68217]: DEBUG oslo_concurrency.lockutils [req-02aea796-0256-4e03-9538-98765c5582b7 req-85320a01-0631-402a-8658-460593acff1c service nova] Releasing lock "refresh_cache-95e625e9-a726-4c3c-be66-7b8ce93b5f8a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 842.073209] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 6113feaf-5c21-49c3-9c19-ea10b60786d3] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11847}} [ 842.191937] env[68217]: DEBUG oslo_vmware.api [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961303, 'name': PowerOnVM_Task, 'duration_secs': 1.127286} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.193054] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 842.193054] env[68217]: INFO nova.compute.manager [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Took 8.93 seconds to spawn the instance on the hypervisor. [ 842.193054] env[68217]: DEBUG nova.compute.manager [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 842.194542] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c40e78f-aa4f-4dda-9e65-dbbccceaec47 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.345646] env[68217]: DEBUG nova.compute.manager [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 842.433176] env[68217]: DEBUG oslo_vmware.api [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961304, 'name': RemoveSnapshot_Task} progress is 80%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.545252] env[68217]: DEBUG nova.compute.manager [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 842.575487] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 8fcccac2-dae1-4af0-a2b2-787e1bb7c9be] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 842.716836] env[68217]: INFO nova.compute.manager [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Took 32.96 seconds to build instance. 
[ 842.733600] env[68217]: DEBUG nova.network.neutron [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Successfully updated port: 2edb7766-2fb2-49a8-8100-5abcb17581eb {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 842.747332] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02204c08-ff06-42e3-b3ac-07a05114fd3c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.758030] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-512a02cb-872a-4722-ada6-82d2cf905c32 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.795768] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "bd62c682-24f2-4559-887a-03186409f699" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 842.796027] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "bd62c682-24f2-4559-887a-03186409f699" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 842.796227] env[68217]: DEBUG nova.compute.manager [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Going to confirm migration 1 {{(pid=68217) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 842.798302] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f105588c-8a41-47e9-816c-2a5a4ac4c2f7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.807496] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62704681-34d0-4450-8539-7430654a353e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.823789] env[68217]: DEBUG nova.compute.provider_tree [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 842.898659] env[68217]: DEBUG nova.network.neutron [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Successfully created port: 49bc7718-8633-456d-b4d1-6bcc8493670b {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 842.925367] env[68217]: DEBUG oslo_vmware.api 
[None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961304, 'name': RemoveSnapshot_Task, 'duration_secs': 1.205861} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.926746] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 843.066350] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.080026] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: f748cf37-6605-49a2-a418-51667a0fac4a] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 843.221444] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e894b13-9ae8-4677-94d5-1dfb1d4f0ae1 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "09290e60-7751-408e-9d6d-20e7cb61767b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.472s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.238779] env[68217]: DEBUG oslo_concurrency.lockutils [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "refresh_cache-d3468ec2-6548-400a-b247-a6ab1156cab5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.238928] env[68217]: DEBUG oslo_concurrency.lockutils [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired lock "refresh_cache-d3468ec2-6548-400a-b247-a6ab1156cab5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.239096] env[68217]: DEBUG nova.network.neutron [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 843.328847] env[68217]: DEBUG nova.scheduler.client.report [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 843.354966] env[68217]: DEBUG nova.compute.manager [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 843.403971] env[68217]: DEBUG nova.virt.hardware [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 843.404350] env[68217]: DEBUG nova.virt.hardware [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 843.404523] env[68217]: DEBUG nova.virt.hardware [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 843.404738] env[68217]: DEBUG nova.virt.hardware [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 843.404964] env[68217]: DEBUG nova.virt.hardware [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 843.405186] env[68217]: DEBUG nova.virt.hardware [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 843.405449] env[68217]: DEBUG nova.virt.hardware [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Topology preferred 
VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 843.405642] env[68217]: DEBUG nova.virt.hardware [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 843.405885] env[68217]: DEBUG nova.virt.hardware [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 843.406110] env[68217]: DEBUG nova.virt.hardware [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 843.406330] env[68217]: DEBUG nova.virt.hardware [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 843.409924] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce493238-7e73-48e2-8649-6d2c1a97085f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.421177] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90597f62-9a65-4270-96c9-4fb07bb0be4e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.431925] env[68217]: WARNING nova.compute.manager [None req-ad6b0099-8ac1-4f7a-b37b-af061a3df14e tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Image not found during snapshot: nova.exception.ImageNotFound: Image 25135da3-3ff8-44c1-b73f-304cfd777743 could not be found. 
[ 843.463754] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "refresh_cache-bd62c682-24f2-4559-887a-03186409f699" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 843.464014] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquired lock "refresh_cache-bd62c682-24f2-4559-887a-03186409f699" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 843.464211] env[68217]: DEBUG nova.network.neutron [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 843.464398] env[68217]: DEBUG nova.objects.instance [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lazy-loading 'info_cache' on Instance uuid bd62c682-24f2-4559-887a-03186409f699 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 843.585501] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: fcddfd72-a130-4efc-82cb-1fb22d33d684] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 843.795622] env[68217]: DEBUG nova.network.neutron [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 843.822490] env[68217]: DEBUG oslo_concurrency.lockutils [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "3af571ce-c400-45a1-97ad-4fbd53395129" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 843.822785] env[68217]: DEBUG oslo_concurrency.lockutils [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "3af571ce-c400-45a1-97ad-4fbd53395129" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.842015] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.511s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 843.842015] env[68217]: DEBUG nova.compute.manager [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 843.855754] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.185s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 843.858458] env[68217]: INFO nova.compute.claims [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 49ec4f5f-043a-4d16-a371-947571c3b090] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 844.005934] env[68217]: DEBUG nova.network.neutron [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Updating instance_info_cache with network_info: [{"id": "2edb7766-2fb2-49a8-8100-5abcb17581eb", "address": "fa:16:3e:f8:3f:71", "network": {"id": "9e8fc690-e99b-4d81-a587-00584139fd8d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1676314619-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebfeb38b81794c558c1164cecd7fa221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2edb7766-2f", "ovs_interfaceid": "2edb7766-2fb2-49a8-8100-5abcb17581eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 844.089260] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 17bea068-7d7a-4a87-8b27-91a7efcd45c5] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 844.260057] env[68217]: DEBUG nova.compute.manager [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Received event network-changed-6f6f347d-9ee4-4b18-9c77-67f67ee873e6 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 844.260057] env[68217]: DEBUG nova.compute.manager [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Refreshing instance network info cache due to event network-changed-6f6f347d-9ee4-4b18-9c77-67f67ee873e6. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 844.260057] env[68217]: DEBUG oslo_concurrency.lockutils [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] Acquiring lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.260057] env[68217]: DEBUG oslo_concurrency.lockutils [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] Acquired lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 844.260340] env[68217]: DEBUG nova.network.neutron [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Refreshing network info cache for port 6f6f347d-9ee4-4b18-9c77-67f67ee873e6 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 844.328046] env[68217]: DEBUG nova.compute.manager [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 844.355727] env[68217]: DEBUG nova.compute.utils [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 844.357133] env[68217]: DEBUG nova.compute.manager [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 844.357317] env[68217]: DEBUG nova.network.neutron [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 844.363766] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.508s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 844.364948] env[68217]: DEBUG nova.compute.utils [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 49ec4f5f-043a-4d16-a371-947571c3b090] Instance 49ec4f5f-043a-4d16-a371-947571c3b090 could not be found. 
{{(pid=68217) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 844.366172] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.300s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 844.368092] env[68217]: INFO nova.compute.claims [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 844.373045] env[68217]: DEBUG nova.compute.manager [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 49ec4f5f-043a-4d16-a371-947571c3b090] Instance disappeared during build. {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2520}} [ 844.373386] env[68217]: DEBUG nova.compute.manager [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 49ec4f5f-043a-4d16-a371-947571c3b090] Unplugging VIFs for instance {{(pid=68217) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3013}} [ 844.373503] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "refresh_cache-49ec4f5f-043a-4d16-a371-947571c3b090" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.373638] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "refresh_cache-49ec4f5f-043a-4d16-a371-947571c3b090" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 844.373842] env[68217]: DEBUG nova.network.neutron [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 49ec4f5f-043a-4d16-a371-947571c3b090] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 844.427307] env[68217]: DEBUG nova.policy [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'efd2f6dc889c4be884aa07c8cbf7b5df', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8b359376e18b4c878d281b8fbec28f69', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 844.509041] env[68217]: DEBUG oslo_concurrency.lockutils [None 
req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Releasing lock "refresh_cache-d3468ec2-6548-400a-b247-a6ab1156cab5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 844.509527] env[68217]: DEBUG nova.compute.manager [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Instance network_info: |[{"id": "2edb7766-2fb2-49a8-8100-5abcb17581eb", "address": "fa:16:3e:f8:3f:71", "network": {"id": "9e8fc690-e99b-4d81-a587-00584139fd8d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1676314619-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebfeb38b81794c558c1164cecd7fa221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2edb7766-2f", "ovs_interfaceid": "2edb7766-2fb2-49a8-8100-5abcb17581eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 844.510011] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f8:3f:71', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5a629f-6902-4d30-9278-74b443a8371d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2edb7766-2fb2-49a8-8100-5abcb17581eb', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 844.519929] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 844.520255] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 844.520964] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab971b72-9777-4d1c-b29a-8ac48271cad1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.553365] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 844.553365] env[68217]: value = "task-2961305" [ 844.553365] env[68217]: _type = "Task" [ 844.553365] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.565587] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961305, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.593555] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: db4cf157-9511-423c-aa41-433af8d92b48] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 844.861028] env[68217]: DEBUG oslo_concurrency.lockutils [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 844.861457] env[68217]: DEBUG nova.compute.manager [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 844.876890] env[68217]: DEBUG nova.compute.utils [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 49ec4f5f-043a-4d16-a371-947571c3b090] Can not refresh info_cache because instance was not found {{(pid=68217) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1056}} [ 844.897885] env[68217]: DEBUG nova.network.neutron [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 49ec4f5f-043a-4d16-a371-947571c3b090] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 845.041418] env[68217]: DEBUG nova.network.neutron [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 49ec4f5f-043a-4d16-a371-947571c3b090] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.067415] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961305, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.103234] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: dfeeed37-8c84-4ecc-87ea-f4239f512fb1] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 845.128582] env[68217]: INFO nova.compute.manager [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Rescuing [ 845.128693] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "refresh_cache-09290e60-7751-408e-9d6d-20e7cb61767b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.131031] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquired lock "refresh_cache-09290e60-7751-408e-9d6d-20e7cb61767b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.131031] env[68217]: DEBUG nova.network.neutron [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 845.199825] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "03d61c68-1b37-4172-b276-67a73a0dc228" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 845.200081] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "03d61c68-1b37-4172-b276-67a73a0dc228" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 845.238128] env[68217]: DEBUG nova.compute.manager [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 845.239080] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e33a398-ea11-40ea-a7ba-025949be5504 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.440598] env[68217]: DEBUG nova.network.neutron [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: 
bd62c682-24f2-4559-887a-03186409f699] Updating instance_info_cache with network_info: [{"id": "d1428ec3-01c4-4a36-9a5b-dba91c81f279", "address": "fa:16:3e:0c:de:50", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.237", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd1428ec3-01", "ovs_interfaceid": "d1428ec3-01c4-4a36-9a5b-dba91c81f279", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.544847] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "refresh_cache-49ec4f5f-043a-4d16-a371-947571c3b090" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.545182] env[68217]: DEBUG nova.compute.manager [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68217) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3036}} [ 845.545449] env[68217]: DEBUG nova.compute.manager [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 49ec4f5f-043a-4d16-a371-947571c3b090] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 845.545645] env[68217]: DEBUG nova.network.neutron [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 49ec4f5f-043a-4d16-a371-947571c3b090] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 845.571306] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961305, 'name': CreateVM_Task, 'duration_secs': 0.634492} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.574131] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 845.575678] env[68217]: DEBUG oslo_concurrency.lockutils [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.575678] env[68217]: DEBUG oslo_concurrency.lockutils [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 845.575678] env[68217]: DEBUG oslo_concurrency.lockutils [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 845.576052] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94e938f6-c1c3-4004-8fa5-87b6a6010115 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.584680] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 845.584680] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e59e28-0e3a-5157-a3b6-b8890365342e" [ 845.584680] env[68217]: _type = "Task" [ 845.584680] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.598410] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e59e28-0e3a-5157-a3b6-b8890365342e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.603752] env[68217]: DEBUG nova.network.neutron [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 49ec4f5f-043a-4d16-a371-947571c3b090] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 845.605490] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 83d32dd6-2629-4451-a746-bf5270083e2a] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 845.705980] env[68217]: DEBUG nova.compute.manager [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 845.753724] env[68217]: INFO nova.compute.manager [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] instance snapshotting [ 845.763025] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d90bd190-34d1-4488-9821-c5f825a02654 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.773968] env[68217]: DEBUG nova.network.neutron [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Updated VIF entry in instance network info cache for port 6f6f347d-9ee4-4b18-9c77-67f67ee873e6. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 845.774449] env[68217]: DEBUG nova.network.neutron [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Updating instance_info_cache with network_info: [{"id": "6f6f347d-9ee4-4b18-9c77-67f67ee873e6", "address": "fa:16:3e:25:6f:1a", "network": {"id": "2a4968db-54c5-475c-9fc7-56d5f6441110", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-553739001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a6348b1f20794ee9a016d409eee576e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f6f347d-9e", "ovs_interfaceid": "6f6f347d-9ee4-4b18-9c77-67f67ee873e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.800842] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3167b24c-5cdd-4a2c-9dac-c8ada31eb9cb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.872010] env[68217]: DEBUG nova.compute.manager [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 
tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 845.897191] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d2e0ab-6473-4737-bf63-9b5dfaddc196 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.904531] env[68217]: DEBUG nova.virt.hardware [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 845.904531] env[68217]: DEBUG nova.virt.hardware [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 845.904699] env[68217]: DEBUG nova.virt.hardware [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 845.904803] env[68217]: DEBUG nova.virt.hardware [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 845.904952] env[68217]: DEBUG nova.virt.hardware [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 845.905372] env[68217]: DEBUG nova.virt.hardware [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 845.905648] env[68217]: DEBUG nova.virt.hardware [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 
tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 845.905856] env[68217]: DEBUG nova.virt.hardware [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 845.906073] env[68217]: DEBUG nova.virt.hardware [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 845.906580] env[68217]: DEBUG nova.virt.hardware [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 845.906824] env[68217]: DEBUG nova.virt.hardware [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 845.907780] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-016461f3-bd9b-4232-bc65-eb18d5fc072a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.913749] env[68217]: DEBUG nova.network.neutron [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Successfully updated port: 49bc7718-8633-456d-b4d1-6bcc8493670b {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 845.917437] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc6d359-5474-4848-9911-ac3ff04433c4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.922904] env[68217]: DEBUG nova.network.neutron [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Successfully created port: 5186da63-8f63-49b5-8750-b04b51317122 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 845.933451] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3585405e-8106-438c-a03d-e00010c4a53d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.972791] env[68217]: DEBUG oslo_concurrency.lockutils [None 
req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Releasing lock "refresh_cache-bd62c682-24f2-4559-887a-03186409f699" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 845.973242] env[68217]: DEBUG nova.objects.instance [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lazy-loading 'migration_context' on Instance uuid bd62c682-24f2-4559-887a-03186409f699 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 845.977201] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2cc42a2-a431-4f04-9fdb-e21a72e64600 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.998467] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5c1ee4-19af-4b1c-ac73-b7a7a56e3715 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.016483] env[68217]: DEBUG nova.compute.provider_tree [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.097844] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e59e28-0e3a-5157-a3b6-b8890365342e, 'name': SearchDatastore_Task, 'duration_secs': 0.025534} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.098242] env[68217]: DEBUG oslo_concurrency.lockutils [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.098476] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 846.098719] env[68217]: DEBUG oslo_concurrency.lockutils [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.098862] env[68217]: DEBUG oslo_concurrency.lockutils [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.099055] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 846.099663] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-96fffee4-1602-4901-b535-29817430e236 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.110508] env[68217]: DEBUG nova.network.neutron [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 49ec4f5f-043a-4d16-a371-947571c3b090] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.111704] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 846.111824] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 846.112977] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 678acc61-1c94-4152-b4e8-7569ab169ab9] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 846.114750] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68f58342-7438-40c5-83f5-8e174755b527 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.122670] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 846.122670] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5294be55-e4dc-dba8-904a-b412f61d5a83" [ 846.122670] env[68217]: _type = "Task" [ 846.122670] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.134352] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5294be55-e4dc-dba8-904a-b412f61d5a83, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.231791] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.277578] env[68217]: DEBUG oslo_concurrency.lockutils [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] Releasing lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 846.277868] env[68217]: DEBUG nova.compute.manager [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Received event network-changed-6f6f347d-9ee4-4b18-9c77-67f67ee873e6 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 846.278081] env[68217]: DEBUG nova.compute.manager [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Refreshing instance network info cache due to event network-changed-6f6f347d-9ee4-4b18-9c77-67f67ee873e6. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 846.278304] env[68217]: DEBUG oslo_concurrency.lockutils [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] Acquiring lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.278470] env[68217]: DEBUG oslo_concurrency.lockutils [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] Acquired lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.278653] env[68217]: DEBUG nova.network.neutron [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Refreshing network info cache for port 6f6f347d-9ee4-4b18-9c77-67f67ee873e6 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 846.280606] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "149bd497-4ee6-4ca2-9d18-b276e773aedf" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.280834] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "149bd497-4ee6-4ca2-9d18-b276e773aedf" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.281106] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "149bd497-4ee6-4ca2-9d18-b276e773aedf-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 846.281386] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "149bd497-4ee6-4ca2-9d18-b276e773aedf-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 846.281492] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "149bd497-4ee6-4ca2-9d18-b276e773aedf-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 846.284251] env[68217]: INFO nova.compute.manager 
[None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Terminating instance [ 846.314396] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 846.314750] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6493c58a-411d-4b64-9667-4157463000a7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.326255] env[68217]: DEBUG oslo_vmware.api [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 846.326255] env[68217]: value = "task-2961306" [ 846.326255] env[68217]: _type = "Task" [ 846.326255] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.338757] env[68217]: DEBUG oslo_vmware.api [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961306, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.430019] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "refresh_cache-2612f6fc-a43f-4011-8a09-51088a49371a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.430255] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "refresh_cache-2612f6fc-a43f-4011-8a09-51088a49371a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 846.430618] env[68217]: DEBUG nova.network.neutron [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 846.476238] env[68217]: DEBUG nova.objects.base [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 846.477761] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec88821c-7cdd-46a0-a4e3-f89add947b14 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.502070] env[68217]: DEBUG oslo_vmware.service [-] Invoking 
HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb1d5923-9538-49ea-aa39-1107a3cb05a0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.510586] env[68217]: DEBUG oslo_vmware.api [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 846.510586] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a69017-b59a-4d6f-3ddf-04da3e056567" [ 846.510586] env[68217]: _type = "Task" [ 846.510586] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.521788] env[68217]: DEBUG nova.scheduler.client.report [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 846.525999] env[68217]: DEBUG oslo_vmware.api [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a69017-b59a-4d6f-3ddf-04da3e056567, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.575812] env[68217]: DEBUG nova.network.neutron [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Updating instance_info_cache with network_info: [{"id": "1eb632e3-fe01-4d72-a4ea-834af75497ef", "address": "fa:16:3e:ec:b6:8c", "network": {"id": "9b382d3b-5356-4cee-b6ae-e9a825915fe9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-894310318-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bde4bb32b82948dd991d1fb8890c991b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4162774e-ec80-4d85-aeb4-fae77f197393", "external-id": "nsx-vlan-transportzone-542", "segmentation_id": 542, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1eb632e3-fe", "ovs_interfaceid": "1eb632e3-fe01-4d72-a4ea-834af75497ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.614155] env[68217]: INFO nova.compute.manager [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 49ec4f5f-043a-4d16-a371-947571c3b090] Took 1.07 seconds to deallocate network for instance. [ 846.618178] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 63e0fc9e-5182-4781-b007-69e2134718df] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 846.637561] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5294be55-e4dc-dba8-904a-b412f61d5a83, 'name': SearchDatastore_Task, 'duration_secs': 0.023035} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.638490] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-674fcbcb-8c89-4b61-8d48-e36b26aed2e9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.648072] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 846.648072] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5258239c-b315-c176-dc55-f8fcb3e24146" [ 846.648072] env[68217]: _type = "Task" [ 846.648072] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.664538] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5258239c-b315-c176-dc55-f8fcb3e24146, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.788553] env[68217]: DEBUG nova.compute.manager [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 846.788774] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 846.789682] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf0b483-8664-4df3-9488-31bca34d09ee {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.799857] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 846.800192] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-53f943b3-810c-4f72-8923-4a95127accda {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.810988] env[68217]: DEBUG oslo_vmware.api [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 846.810988] env[68217]: value = "task-2961307" [ 846.810988] env[68217]: _type = "Task" [ 846.810988] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.823048] env[68217]: DEBUG oslo_vmware.api [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961307, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.838858] env[68217]: DEBUG oslo_vmware.api [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961306, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.973073] env[68217]: DEBUG oslo_vmware.rw_handles [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524b2e94-7459-0e2d-c87d-3b8c263a3042/disk-0.vmdk. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 846.974771] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b1752f-ad9e-4146-a626-07026e8542d9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.985569] env[68217]: DEBUG oslo_vmware.rw_handles [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524b2e94-7459-0e2d-c87d-3b8c263a3042/disk-0.vmdk is in state: ready. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 846.986411] env[68217]: ERROR oslo_vmware.rw_handles [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524b2e94-7459-0e2d-c87d-3b8c263a3042/disk-0.vmdk due to incomplete transfer. [ 846.986949] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-66a1cb3e-0631-4b2e-b503-24f2be9a35f3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.000026] env[68217]: DEBUG oslo_vmware.rw_handles [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/524b2e94-7459-0e2d-c87d-3b8c263a3042/disk-0.vmdk. 
{{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 847.000026] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Uploaded image 31c3d1c5-dcbd-447b-935c-0ac48e805003 to the Glance image server {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 847.000910] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 847.001409] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5567d5c1-eec4-484f-954f-4140eedd9207 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.010335] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 847.010335] env[68217]: value = "task-2961308" [ 847.010335] env[68217]: _type = "Task" [ 847.010335] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.028025] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.662s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.029472] env[68217]: DEBUG nova.compute.manager [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 847.039673] env[68217]: DEBUG oslo_concurrency.lockutils [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.179s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 847.041524] env[68217]: INFO nova.compute.claims [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 847.044304] env[68217]: DEBUG oslo_vmware.api [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a69017-b59a-4d6f-3ddf-04da3e056567, 'name': SearchDatastore_Task, 'duration_secs': 0.011778} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.045197] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961308, 'name': Destroy_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.052309] env[68217]: DEBUG nova.network.neutron [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 847.054393] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 847.078849] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Releasing lock "refresh_cache-09290e60-7751-408e-9d6d-20e7cb61767b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.122655] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 480cbc5c-7e8e-4d7f-81ca-fb8b81b35ba9] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 847.163024] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5258239c-b315-c176-dc55-f8fcb3e24146, 'name': SearchDatastore_Task, 'duration_secs': 0.020294} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.163024] env[68217]: DEBUG oslo_concurrency.lockutils [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 847.163024] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] d3468ec2-6548-400a-b247-a6ab1156cab5/d3468ec2-6548-400a-b247-a6ab1156cab5.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 847.163024] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-054fb952-8e03-4ceb-961c-bf3941a65067 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.171038] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 847.171038] env[68217]: value = "task-2961309" [ 847.171038] env[68217]: _type = "Task" [ 847.171038] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.181375] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961309, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.331118] env[68217]: DEBUG oslo_vmware.api [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961307, 'name': PowerOffVM_Task, 'duration_secs': 0.485378} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.338300] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 847.338894] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 847.342135] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-efe40391-1ab5-4776-ae82-eea036ef718e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.354718] env[68217]: DEBUG oslo_vmware.api [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961306, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.427445] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 847.427445] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 847.427445] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Deleting the datastore file [datastore1] 149bd497-4ee6-4ca2-9d18-b276e773aedf {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 847.427445] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c9147332-dd95-4f9b-b9dc-ea4a44540d60 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.435917] env[68217]: DEBUG oslo_vmware.api [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 847.435917] env[68217]: value = "task-2961311" [ 847.435917] env[68217]: _type = "Task" [ 847.435917] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.445151] env[68217]: DEBUG oslo_vmware.api [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961311, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.524331] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961308, 'name': Destroy_Task} progress is 33%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.553501] env[68217]: DEBUG nova.compute.utils [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 847.555019] env[68217]: DEBUG nova.compute.manager [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 847.555197] env[68217]: DEBUG nova.network.neutron [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 847.576020] env[68217]: DEBUG nova.network.neutron [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Updated VIF entry in instance network info cache for port 6f6f347d-9ee4-4b18-9c77-67f67ee873e6. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 847.576425] env[68217]: DEBUG nova.network.neutron [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Updating instance_info_cache with network_info: [{"id": "6f6f347d-9ee4-4b18-9c77-67f67ee873e6", "address": "fa:16:3e:25:6f:1a", "network": {"id": "2a4968db-54c5-475c-9fc7-56d5f6441110", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-553739001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a6348b1f20794ee9a016d409eee576e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f6f347d-9e", "ovs_interfaceid": "6f6f347d-9ee4-4b18-9c77-67f67ee873e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.581915] env[68217]: DEBUG nova.network.neutron [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Updating instance_info_cache with network_info: [{"id": "49bc7718-8633-456d-b4d1-6bcc8493670b", "address": "fa:16:3e:8b:bf:69", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49bc7718-86", "ovs_interfaceid": "49bc7718-8633-456d-b4d1-6bcc8493670b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 847.628115] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: aff2750d-b9c0-4e4b-a5ba-71c40a3e0ec5] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 847.648804] env[68217]: DEBUG 
nova.policy [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '011d38e070744a3fb3c515d5e669ed22', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9ba7843e6144cd1877b48bc40cd64f3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 847.654677] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d75a93ed-e281-403a-a242-cf53a70888f6 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "49ec4f5f-043a-4d16-a371-947571c3b090" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 7.032s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 847.683509] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961309, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.711712] env[68217]: DEBUG nova.compute.manager [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Received event network-changed-2edb7766-2fb2-49a8-8100-5abcb17581eb {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 847.711712] env[68217]: DEBUG nova.compute.manager [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Refreshing instance network info cache due to event network-changed-2edb7766-2fb2-49a8-8100-5abcb17581eb. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 847.711920] env[68217]: DEBUG oslo_concurrency.lockutils [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] Acquiring lock "refresh_cache-d3468ec2-6548-400a-b247-a6ab1156cab5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.712017] env[68217]: DEBUG oslo_concurrency.lockutils [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] Acquired lock "refresh_cache-d3468ec2-6548-400a-b247-a6ab1156cab5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 847.712898] env[68217]: DEBUG nova.network.neutron [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Refreshing network info cache for port 2edb7766-2fb2-49a8-8100-5abcb17581eb {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 847.842240] env[68217]: DEBUG oslo_vmware.api [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961306, 'name': CreateSnapshot_Task, 'duration_secs': 1.114223} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.842646] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 847.844028] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43189c82-32d9-484c-9671-b904819783ef {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.947909] env[68217]: DEBUG oslo_vmware.api [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961311, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.420418} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.948310] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 847.948545] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 847.948768] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 847.948993] env[68217]: INFO nova.compute.manager [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Took 1.16 seconds to destroy the instance on the hypervisor. [ 847.949358] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 847.949625] env[68217]: DEBUG nova.compute.manager [-] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 847.949759] env[68217]: DEBUG nova.network.neutron [-] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 848.024487] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961308, 'name': Destroy_Task} progress is 33%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.059997] env[68217]: DEBUG nova.compute.manager [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 848.080681] env[68217]: DEBUG oslo_concurrency.lockutils [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] Releasing lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.080966] env[68217]: DEBUG nova.compute.manager [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Received event network-vif-plugged-2edb7766-2fb2-49a8-8100-5abcb17581eb {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 848.081183] env[68217]: DEBUG oslo_concurrency.lockutils [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] Acquiring lock "d3468ec2-6548-400a-b247-a6ab1156cab5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 848.081396] env[68217]: DEBUG oslo_concurrency.lockutils [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] Lock "d3468ec2-6548-400a-b247-a6ab1156cab5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 848.081597] env[68217]: DEBUG oslo_concurrency.lockutils [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] Lock "d3468ec2-6548-400a-b247-a6ab1156cab5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 848.082476] env[68217]: DEBUG nova.compute.manager [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] No waiting events found dispatching network-vif-plugged-2edb7766-2fb2-49a8-8100-5abcb17581eb {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 848.082476] env[68217]: WARNING nova.compute.manager [req-c4a46496-313c-4289-b012-8b8319a03d21 req-f6c84e0f-4d1f-46fc-9bc0-c4440e5eab82 service nova] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Received unexpected event network-vif-plugged-2edb7766-2fb2-49a8-8100-5abcb17581eb for instance with vm_state building and task_state spawning. 
[ 848.084727] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "refresh_cache-2612f6fc-a43f-4011-8a09-51088a49371a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 848.085055] env[68217]: DEBUG nova.compute.manager [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Instance network_info: |[{"id": "49bc7718-8633-456d-b4d1-6bcc8493670b", "address": "fa:16:3e:8b:bf:69", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49bc7718-86", "ovs_interfaceid": "49bc7718-8633-456d-b4d1-6bcc8493670b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 848.085492] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8b:bf:69', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b1f3e6c3-5584-4852-9017-476ab8ac4946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '49bc7718-8633-456d-b4d1-6bcc8493670b', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 848.093725] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 848.094141] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 848.094466] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9497567-b624-4f64-ae39-518cd4138e4f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.120397] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 848.120397] env[68217]: value = "task-2961312" [ 848.120397] env[68217]: _type = "Task" [ 848.120397] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.137644] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 93b49e91-5e9a-4b11-a833-31ab0883e0e8] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 848.139332] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961312, 'name': CreateVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.184452] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961309, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.871972} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.185146] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] d3468ec2-6548-400a-b247-a6ab1156cab5/d3468ec2-6548-400a-b247-a6ab1156cab5.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 848.185409] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 848.185960] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a83dcd11-b8ea-4e76-ac6b-1d4a0a2ef199 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.193293] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 848.193293] env[68217]: value = "task-2961313" [ 848.193293] env[68217]: _type = "Task" [ 848.193293] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.205467] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961313, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.370378] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 848.373425] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-b10602c4-8bb4-4015-a89d-3be9e8459af3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.385083] env[68217]: DEBUG oslo_vmware.api [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 848.385083] env[68217]: value = "task-2961314" [ 848.385083] env[68217]: _type = "Task" [ 848.385083] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.396351] env[68217]: DEBUG oslo_vmware.api [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961314, 'name': CloneVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.478017] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d92552b-6f5f-498c-bf51-b457f181bb1c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.484825] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78cee126-019b-4433-a951-edeb6e7e1c8f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.527536] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c3ca04d-1c7c-425b-831c-86d9eb906186 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.537811] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961308, 'name': Destroy_Task, 'duration_secs': 1.07917} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.540259] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Destroyed the VM [ 848.540536] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 848.540903] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-6dd47807-fbf9-4675-84b6-11a787ce7746 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.543588] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac596e25-00f1-4e26-b017-3fd7d2ff3927 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.575523] env[68217]: DEBUG nova.compute.provider_tree [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.579649] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 848.579649] env[68217]: value = "task-2961315" [ 848.579649] env[68217]: _type = "Task" [ 848.579649] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.597367] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961315, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.630545] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 848.631100] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b2e45a1e-75cb-459d-9398-9593b6a5ffb4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.638728] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961312, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.643791] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: af11d05f-4432-4505-bb52-226414488960] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 848.647446] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 848.647446] env[68217]: value = "task-2961316" [ 848.647446] env[68217]: _type = "Task" [ 848.647446] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.661545] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961316, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.710224] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961313, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.219165} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.710567] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 848.711524] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c488411-4513-4712-a9c8-11e65282df2c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.737271] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] d3468ec2-6548-400a-b247-a6ab1156cab5/d3468ec2-6548-400a-b247-a6ab1156cab5.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 848.737463] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1bb64ee8-3dd4-41e7-a569-63c801bb311f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.761208] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 848.761208] env[68217]: value = "task-2961317" [ 848.761208] env[68217]: _type = "Task" [ 848.761208] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.770969] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961317, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.893994] env[68217]: DEBUG oslo_vmware.api [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961314, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.935333] env[68217]: DEBUG nova.network.neutron [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Successfully created port: ed3866d2-77b3-4f38-903b-76d8b6349fc8 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 849.081088] env[68217]: DEBUG nova.compute.manager [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 849.084377] env[68217]: DEBUG nova.scheduler.client.report [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 849.100822] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961315, 'name': RemoveSnapshot_Task, 'duration_secs': 0.392031} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.101127] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 849.101399] env[68217]: DEBUG nova.compute.manager [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 849.102828] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9183d1ff-1569-49de-8338-6f40363ebe1d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.128751] env[68217]: DEBUG nova.virt.hardware [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 849.128944] env[68217]: DEBUG nova.virt.hardware [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 849.129139] env[68217]: DEBUG nova.virt.hardware [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 849.129355] env[68217]: DEBUG nova.virt.hardware [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 849.129528] env[68217]: DEBUG nova.virt.hardware [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 849.129696] env[68217]: DEBUG nova.virt.hardware [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] 
Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 849.129949] env[68217]: DEBUG nova.virt.hardware [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 849.130149] env[68217]: DEBUG nova.virt.hardware [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 849.130359] env[68217]: DEBUG nova.virt.hardware [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 849.130541] env[68217]: DEBUG nova.virt.hardware [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 849.131825] env[68217]: DEBUG nova.virt.hardware [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 849.132258] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ec672de-797c-4e59-8766-5a72d371461a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.141404] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961312, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.146482] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-436845b6-3241-4a24-b11f-4db45845d576 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.150050] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: bbd282ea-58aa-47b8-aa82-283a55ac1b29] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 849.169689] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961316, 'name': PowerOffVM_Task, 'duration_secs': 0.481077} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.170612] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 849.171460] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc1cb620-8a2d-4694-981a-ed509f09d7e0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.174893] env[68217]: DEBUG nova.network.neutron [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Updated VIF entry in instance network info cache for port 2edb7766-2fb2-49a8-8100-5abcb17581eb. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 849.175203] env[68217]: DEBUG nova.network.neutron [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Updating instance_info_cache with network_info: [{"id": "2edb7766-2fb2-49a8-8100-5abcb17581eb", "address": "fa:16:3e:f8:3f:71", "network": {"id": "9e8fc690-e99b-4d81-a587-00584139fd8d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1676314619-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebfeb38b81794c558c1164cecd7fa221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2edb7766-2f", "ovs_interfaceid": "2edb7766-2fb2-49a8-8100-5abcb17581eb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.200251] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c53f1d36-f757-405e-bad6-4913b9bcff30 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.232311] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 849.232607] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-375523e5-4bb4-4fa0-8487-e893c24199c5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.242634] env[68217]: 
DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 849.242634] env[68217]: value = "task-2961318" [ 849.242634] env[68217]: _type = "Task" [ 849.242634] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.253159] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961318, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.273068] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961317, 'name': ReconfigVM_Task, 'duration_secs': 0.490323} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.273068] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Reconfigured VM instance instance-00000042 to attach disk [datastore2] d3468ec2-6548-400a-b247-a6ab1156cab5/d3468ec2-6548-400a-b247-a6ab1156cab5.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 849.273068] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-469f94a8-3c89-4e0c-8b9f-ea8265421d72 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.279030] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 849.279030] env[68217]: value = "task-2961319" [ 849.279030] env[68217]: _type = "Task" [ 849.279030] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.287707] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961319, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.397391] env[68217]: DEBUG oslo_vmware.api [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961314, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.446110] env[68217]: DEBUG nova.network.neutron [-] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 849.592787] env[68217]: DEBUG oslo_concurrency.lockutils [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.554s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.593928] env[68217]: DEBUG nova.compute.manager [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 849.596347] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.365s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.597815] env[68217]: INFO nova.compute.claims [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 849.621017] env[68217]: INFO nova.compute.manager [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Shelve offloading [ 849.633309] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961312, 'name': CreateVM_Task, 'duration_secs': 1.469199} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.633471] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 849.634272] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.634485] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.634997] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 849.635304] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a8dfd22-b62e-4cce-bf86-3b255ea934e4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.644411] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 849.644411] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d4329c-3dda-f21f-72b3-0182390feaff" [ 849.644411] env[68217]: _type = "Task" [ 849.644411] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.658077] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 0caf6fa0-d8f0-47a4-ad44-8b9f739f9ca5] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 849.659838] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d4329c-3dda-f21f-72b3-0182390feaff, 'name': SearchDatastore_Task, 'duration_secs': 0.012327} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.660454] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.660683] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 849.661010] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.661167] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.661340] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 849.661584] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-354cce91-a753-4dbf-9ded-8161b0d2cdb6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.671588] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 849.671860] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 849.672700] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3175c066-3010-44db-8ddc-60d75081688c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.677741] env[68217]: DEBUG oslo_concurrency.lockutils [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] Releasing lock "refresh_cache-d3468ec2-6548-400a-b247-a6ab1156cab5" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 849.677973] env[68217]: DEBUG nova.compute.manager [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Received event network-vif-plugged-49bc7718-8633-456d-b4d1-6bcc8493670b {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 849.678174] env[68217]: DEBUG oslo_concurrency.lockutils [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] Acquiring lock "2612f6fc-a43f-4011-8a09-51088a49371a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 849.678368] env[68217]: DEBUG oslo_concurrency.lockutils [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] Lock "2612f6fc-a43f-4011-8a09-51088a49371a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 849.678524] env[68217]: DEBUG oslo_concurrency.lockutils [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] Lock "2612f6fc-a43f-4011-8a09-51088a49371a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 849.678688] env[68217]: DEBUG nova.compute.manager [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] No waiting events found dispatching network-vif-plugged-49bc7718-8633-456d-b4d1-6bcc8493670b {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 849.678850] env[68217]: WARNING nova.compute.manager [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Received unexpected event network-vif-plugged-49bc7718-8633-456d-b4d1-6bcc8493670b for instance with vm_state building and task_state spawning. 
[ 849.679139] env[68217]: DEBUG nova.compute.manager [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Received event network-changed-49bc7718-8633-456d-b4d1-6bcc8493670b {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 849.679207] env[68217]: DEBUG nova.compute.manager [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Refreshing instance network info cache due to event network-changed-49bc7718-8633-456d-b4d1-6bcc8493670b. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 849.679342] env[68217]: DEBUG oslo_concurrency.lockutils [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] Acquiring lock "refresh_cache-2612f6fc-a43f-4011-8a09-51088a49371a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.679475] env[68217]: DEBUG oslo_concurrency.lockutils [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] Acquired lock "refresh_cache-2612f6fc-a43f-4011-8a09-51088a49371a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 849.679638] env[68217]: DEBUG nova.network.neutron [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Refreshing network info cache for port 49bc7718-8633-456d-b4d1-6bcc8493670b {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 849.681854] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 849.681854] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ee4b6d-563c-8222-16ff-a932630d8cd4" [ 849.681854] env[68217]: _type = "Task" [ 849.681854] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.691878] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ee4b6d-563c-8222-16ff-a932630d8cd4, 'name': SearchDatastore_Task, 'duration_secs': 0.008946} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.692623] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f070fb59-af3e-4614-a5f4-63bb7375a586 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.698957] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 849.698957] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5217e7e4-64cd-9777-f047-774e583ed294" [ 849.698957] env[68217]: _type = "Task" [ 849.698957] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.707446] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5217e7e4-64cd-9777-f047-774e583ed294, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.753922] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] VM already powered off {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 849.754113] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 849.754336] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.770165] env[68217]: DEBUG nova.network.neutron [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Successfully updated port: 5186da63-8f63-49b5-8750-b04b51317122 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 849.792044] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961319, 'name': Rename_Task, 'duration_secs': 0.185662} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.792337] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 849.792972] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-971441a3-e2e8-47af-bab6-dd65e31020bb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.800407] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 849.800407] env[68217]: value = "task-2961320" [ 849.800407] env[68217]: _type = "Task" [ 849.800407] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.809490] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961320, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.894862] env[68217]: DEBUG oslo_vmware.api [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961314, 'name': CloneVM_Task, 'duration_secs': 1.350278} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.895149] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Created linked-clone VM from snapshot [ 849.895906] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c3e97d-c239-463c-bce8-bbf4a8444904 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.903715] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Uploading image 73846363-8ec9-4ba7-8de7-f03414dbfcf7 {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 849.935446] env[68217]: DEBUG oslo_vmware.rw_handles [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 849.935446] env[68217]: value = "vm-594290" [ 849.935446] env[68217]: _type = "VirtualMachine" [ 849.935446] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 849.935736] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-99a3444a-c7ad-4fea-970e-0f854be89b4e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.944023] env[68217]: DEBUG oslo_vmware.rw_handles [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lease: (returnval){ [ 849.944023] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5223c62c-00d3-7a0c-2cbc-9be93b353780" [ 849.944023] env[68217]: _type = "HttpNfcLease" [ 849.944023] env[68217]: } obtained for exporting VM: (result){ [ 849.944023] env[68217]: value = "vm-594290" [ 849.944023] env[68217]: _type = "VirtualMachine" [ 849.944023] env[68217]: }. 
{{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 849.944296] env[68217]: DEBUG oslo_vmware.api [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the lease: (returnval){ [ 849.944296] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5223c62c-00d3-7a0c-2cbc-9be93b353780" [ 849.944296] env[68217]: _type = "HttpNfcLease" [ 849.944296] env[68217]: } to be ready. {{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 849.948941] env[68217]: INFO nova.compute.manager [-] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Took 2.00 seconds to deallocate network for instance. [ 849.957021] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 849.957021] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5223c62c-00d3-7a0c-2cbc-9be93b353780" [ 849.957021] env[68217]: _type = "HttpNfcLease" [ 849.957021] env[68217]: } is initializing. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 850.102469] env[68217]: DEBUG nova.compute.utils [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 850.105771] env[68217]: DEBUG nova.compute.manager [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 850.105942] env[68217]: DEBUG nova.network.neutron [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 850.122525] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 850.122797] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8d0a4bf2-0641-4cd9-8ff5-4979a21c4aaf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.131957] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 850.131957] env[68217]: value = "task-2961322" [ 850.131957] env[68217]: _type = "Task" [ 850.131957] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.142496] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] VM already powered off {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 850.142762] env[68217]: DEBUG nova.compute.manager [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 850.144246] env[68217]: DEBUG nova.policy [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fd08981ea724019826d597a1c8b4ecd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d6233e9874c41329f81c990f8bc72b1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 850.146183] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-801f02b5-f433-4e18-8694-a674ce5b1bf9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.153616] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.153782] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquired lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.153949] env[68217]: DEBUG nova.network.neutron [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 850.160835] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: ae5fa3f4-e487-40ed-9ca4-12a6f9713eba] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 850.209391] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5217e7e4-64cd-9777-f047-774e583ed294, 'name': 
SearchDatastore_Task, 'duration_secs': 0.012249} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.209686] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 850.209947] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 2612f6fc-a43f-4011-8a09-51088a49371a/2612f6fc-a43f-4011-8a09-51088a49371a.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 850.210248] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.210438] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 850.210654] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1e68cf0c-61ce-4c97-861f-bdf5289b9a0f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.212777] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-14e9f61e-6395-493e-bdfd-fe681479d105 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.220589] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 850.220589] env[68217]: value = "task-2961323" [ 850.220589] env[68217]: _type = "Task" [ 850.220589] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.225277] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 850.225449] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 850.226731] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-42396ee0-8ba9-480c-a308-9a5fbee79b5b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.232313] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961323, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.235865] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 850.235865] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b88370-280e-bbd0-a175-9a4d0409deb2" [ 850.235865] env[68217]: _type = "Task" [ 850.235865] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.245690] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b88370-280e-bbd0-a175-9a4d0409deb2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.275413] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Acquiring lock "refresh_cache-62628aed-e2f9-478f-bed7-00757fc3c484" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.275599] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Acquired lock "refresh_cache-62628aed-e2f9-478f-bed7-00757fc3c484" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.275913] env[68217]: DEBUG nova.network.neutron [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 850.311557] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961320, 'name': PowerOnVM_Task} progress is 94%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.439489] env[68217]: DEBUG nova.compute.manager [req-c6de12f6-8ca8-46f2-88ea-1ca98afb94a8 req-1835b412-428c-42f4-9b98-2555f0d19014 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Received event network-changed-6f6f347d-9ee4-4b18-9c77-67f67ee873e6 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 850.439700] env[68217]: DEBUG nova.compute.manager [req-c6de12f6-8ca8-46f2-88ea-1ca98afb94a8 req-1835b412-428c-42f4-9b98-2555f0d19014 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Refreshing instance network info cache due to event network-changed-6f6f347d-9ee4-4b18-9c77-67f67ee873e6. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 850.439911] env[68217]: DEBUG oslo_concurrency.lockutils [req-c6de12f6-8ca8-46f2-88ea-1ca98afb94a8 req-1835b412-428c-42f4-9b98-2555f0d19014 service nova] Acquiring lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.440086] env[68217]: DEBUG oslo_concurrency.lockutils [req-c6de12f6-8ca8-46f2-88ea-1ca98afb94a8 req-1835b412-428c-42f4-9b98-2555f0d19014 service nova] Acquired lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 850.440261] env[68217]: DEBUG nova.network.neutron [req-c6de12f6-8ca8-46f2-88ea-1ca98afb94a8 req-1835b412-428c-42f4-9b98-2555f0d19014 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Refreshing network info cache for port 6f6f347d-9ee4-4b18-9c77-67f67ee873e6 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 850.456799] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 850.457151] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 850.457151] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5223c62c-00d3-7a0c-2cbc-9be93b353780" [ 850.457151] env[68217]: _type = "HttpNfcLease" [ 850.457151] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 850.457734] env[68217]: DEBUG oslo_vmware.rw_handles [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 850.457734] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5223c62c-00d3-7a0c-2cbc-9be93b353780" [ 850.457734] env[68217]: _type = "HttpNfcLease" [ 850.457734] env[68217]: }. 
{{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 850.458763] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c118ddac-fc52-4cb4-a749-9572a3a6a7b6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.467239] env[68217]: DEBUG oslo_vmware.rw_handles [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ade483-ce28-afa1-dff0-10bbd7c1f7c2/disk-0.vmdk from lease info. {{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 850.467447] env[68217]: DEBUG oslo_vmware.rw_handles [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ade483-ce28-afa1-dff0-10bbd7c1f7c2/disk-0.vmdk for reading. {{(pid=68217) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 850.571571] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-b2ca27c6-3df6-422c-94e9-ad5bb5eb3ee3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.606227] env[68217]: DEBUG nova.compute.manager [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 850.664148] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 00d2302b-84d4-42d8-94c7-caf45b925ddf] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 850.733815] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961323, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.750805] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b88370-280e-bbd0-a175-9a4d0409deb2, 'name': SearchDatastore_Task, 'duration_secs': 0.019318} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.755573] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc239d11-e821-4217-a252-a0e6eb7a4692 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.763578] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 850.763578] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52102422-24df-6af4-d63d-899b3f62fb9e" [ 850.763578] env[68217]: _type = "Task" [ 850.763578] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.773772] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52102422-24df-6af4-d63d-899b3f62fb9e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.815112] env[68217]: DEBUG oslo_vmware.api [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961320, 'name': PowerOnVM_Task, 'duration_secs': 0.665289} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.819018] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 850.819018] env[68217]: INFO nova.compute.manager [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Took 10.11 seconds to spawn the instance on the hypervisor. [ 850.819018] env[68217]: DEBUG nova.compute.manager [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 850.819018] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a72baa46-8d3a-42fe-a453-c6490a859623 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.820556] env[68217]: DEBUG nova.network.neutron [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 850.913717] env[68217]: DEBUG nova.network.neutron [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Updated VIF entry in instance network info cache for port 49bc7718-8633-456d-b4d1-6bcc8493670b. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 850.915281] env[68217]: DEBUG nova.network.neutron [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Updating instance_info_cache with network_info: [{"id": "49bc7718-8633-456d-b4d1-6bcc8493670b", "address": "fa:16:3e:8b:bf:69", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49bc7718-86", "ovs_interfaceid": "49bc7718-8633-456d-b4d1-6bcc8493670b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 850.985954] env[68217]: DEBUG nova.network.neutron [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Updating instance_info_cache with network_info: [{"id": "5186da63-8f63-49b5-8750-b04b51317122", "address": "fa:16:3e:05:bd:f2", "network": {"id": "2046820f-fbc9-4e17-97f7-bb97bc53083f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-34123343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b359376e18b4c878d281b8fbec28f69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5186da63-8f", "ovs_interfaceid": "5186da63-8f63-49b5-8750-b04b51317122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.103608] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7102b3fe-3f3a-4391-82e6-b4fee74546d7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.108616] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-539d7711-ab24-47d9-9e8e-503c807b8256 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.143099] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9649cf37-049c-4230-8163-924947114ee7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.153146] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97b99d45-77a2-42d3-94c7-32be3592614b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.171276] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 14c8e8e6-5d7f-45b4-8a84-d5951c38573f] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 851.174704] env[68217]: DEBUG nova.compute.provider_tree [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 851.232291] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961323, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.57525} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.232729] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 2612f6fc-a43f-4011-8a09-51088a49371a/2612f6fc-a43f-4011-8a09-51088a49371a.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 851.233144] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 851.233501] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8ac67ae-73cb-4197-80b3-26443b3ed297 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.244479] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 851.244479] env[68217]: value = "task-2961324" [ 851.244479] env[68217]: _type = "Task" [ 851.244479] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.254028] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961324, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.279241] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52102422-24df-6af4-d63d-899b3f62fb9e, 'name': SearchDatastore_Task, 'duration_secs': 0.014945} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.280516] env[68217]: DEBUG nova.network.neutron [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Successfully created port: 56c6f7cc-e41b-4494-84b7-8925f5d7e59d {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 851.282946] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.283336] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 09290e60-7751-408e-9d6d-20e7cb61767b/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk. {{(pid=68217) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 851.283803] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6f32a556-1e95-40d1-9f34-b21a1f20978d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.292496] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 851.292496] env[68217]: value = "task-2961325" [ 851.292496] env[68217]: _type = "Task" [ 851.292496] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.301933] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961325, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.342752] env[68217]: INFO nova.compute.manager [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Took 28.26 seconds to build instance. 
[ 851.417181] env[68217]: DEBUG oslo_concurrency.lockutils [req-5927fc87-4c71-41d2-855b-2117c3ea7fbc req-798d2d45-115f-45b8-89f2-c68cfe7cbd78 service nova] Releasing lock "refresh_cache-2612f6fc-a43f-4011-8a09-51088a49371a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.490982] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Releasing lock "refresh_cache-62628aed-e2f9-478f-bed7-00757fc3c484" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 851.491394] env[68217]: DEBUG nova.compute.manager [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Instance network_info: |[{"id": "5186da63-8f63-49b5-8750-b04b51317122", "address": "fa:16:3e:05:bd:f2", "network": {"id": "2046820f-fbc9-4e17-97f7-bb97bc53083f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-34123343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b359376e18b4c878d281b8fbec28f69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5186da63-8f", "ovs_interfaceid": "5186da63-8f63-49b5-8750-b04b51317122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 851.491817] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:bd:f2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3d2e4070-a78e-4d08-a104-b6312ab65577', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5186da63-8f63-49b5-8750-b04b51317122', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 851.499686] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Creating folder: Project (8b359376e18b4c878d281b8fbec28f69). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 851.500333] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8c4b3aa5-8978-4dba-a474-458d691f4bf4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.518856] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Created folder: Project (8b359376e18b4c878d281b8fbec28f69) in parent group-v594094. [ 851.519075] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Creating folder: Instances. Parent ref: group-v594291. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 851.519327] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-90943cd1-4409-453f-9a11-d113aa0d6662 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.533436] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Created folder: Instances in parent group-v594291. [ 851.533436] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 851.533436] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 851.533436] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b6ac6a4d-c05b-4b4f-933a-eacf799f9833 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.554243] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 851.554243] env[68217]: value = "task-2961328" [ 851.554243] env[68217]: _type = "Task" [ 851.554243] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.563134] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961328, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.563932] env[68217]: DEBUG nova.network.neutron [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Updating instance_info_cache with network_info: [{"id": "686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0", "address": "fa:16:3e:2c:18:00", "network": {"id": "2456ed77-d69f-4430-b649-cebfb2e6e5c6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-259900287-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "023b801c234d47d79cb57ea73058e81c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap686a0657-d9", "ovs_interfaceid": "686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.645870] env[68217]: DEBUG nova.compute.manager [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 851.677679] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: b0b21c65-ef3d-4492-a6b2-d2321a3dacde] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 851.680558] env[68217]: DEBUG nova.scheduler.client.report [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 851.691609] env[68217]: DEBUG nova.virt.hardware [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 851.694521] env[68217]: DEBUG nova.virt.hardware [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 851.695361] env[68217]: DEBUG nova.virt.hardware [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 851.695734] env[68217]: DEBUG nova.virt.hardware [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 851.696086] env[68217]: DEBUG nova.virt.hardware [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 851.696513] env[68217]: DEBUG nova.virt.hardware [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Chose sockets=0, cores=0, 
threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 851.696866] env[68217]: DEBUG nova.virt.hardware [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 851.698940] env[68217]: DEBUG nova.virt.hardware [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 851.698940] env[68217]: DEBUG nova.virt.hardware [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 851.698940] env[68217]: DEBUG nova.virt.hardware [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 851.698940] env[68217]: DEBUG nova.virt.hardware [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 851.699886] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fab917c-6e0c-4c8e-9e6f-c3031779c450 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.710095] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b23bb478-3f02-4944-95de-199706373b09 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.757546] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961324, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073871} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.758235] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 851.759398] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94ee4a78-c9fe-4dec-aece-9d4cc207aec0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.792076] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] 2612f6fc-a43f-4011-8a09-51088a49371a/2612f6fc-a43f-4011-8a09-51088a49371a.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 851.792076] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8ab4df3d-1d87-4b2f-b443-1e9b3fdcf182 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.818069] env[68217]: DEBUG nova.network.neutron [req-c6de12f6-8ca8-46f2-88ea-1ca98afb94a8 req-1835b412-428c-42f4-9b98-2555f0d19014 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Updated VIF entry in instance network info cache for port 6f6f347d-9ee4-4b18-9c77-67f67ee873e6. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 851.818526] env[68217]: DEBUG nova.network.neutron [req-c6de12f6-8ca8-46f2-88ea-1ca98afb94a8 req-1835b412-428c-42f4-9b98-2555f0d19014 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Updating instance_info_cache with network_info: [{"id": "6f6f347d-9ee4-4b18-9c77-67f67ee873e6", "address": "fa:16:3e:25:6f:1a", "network": {"id": "2a4968db-54c5-475c-9fc7-56d5f6441110", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-553739001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a6348b1f20794ee9a016d409eee576e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f6f347d-9e", "ovs_interfaceid": "6f6f347d-9ee4-4b18-9c77-67f67ee873e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.823071] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 851.823071] env[68217]: value = "task-2961329" [ 851.823071] env[68217]: _type = "Task" [ 851.823071] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.828217] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961325, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.838570] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961329, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.844276] env[68217]: DEBUG oslo_concurrency.lockutils [None req-60621a69-33bd-45f2-a88d-93283eab88fc tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "d3468ec2-6548-400a-b247-a6ab1156cab5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.772s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.065249] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961328, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.067485] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Releasing lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 852.199152] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 9ac81867-311c-42f3-b38f-67dc10f409c0] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 852.201783] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.605s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.202380] env[68217]: DEBUG nova.compute.manager [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 852.205937] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 5.151s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.251416] env[68217]: DEBUG nova.network.neutron [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Successfully updated port: ed3866d2-77b3-4f38-903b-76d8b6349fc8 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 852.321927] env[68217]: DEBUG oslo_concurrency.lockutils [req-c6de12f6-8ca8-46f2-88ea-1ca98afb94a8 req-1835b412-428c-42f4-9b98-2555f0d19014 service nova] Releasing lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 852.322324] env[68217]: DEBUG nova.compute.manager [req-c6de12f6-8ca8-46f2-88ea-1ca98afb94a8 req-1835b412-428c-42f4-9b98-2555f0d19014 service nova] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Received event network-vif-deleted-9f5258ed-d071-4e53-9f7e-43d85ef5ae04 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 852.322522] env[68217]: DEBUG nova.compute.manager [req-c6de12f6-8ca8-46f2-88ea-1ca98afb94a8 req-1835b412-428c-42f4-9b98-2555f0d19014 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Received event network-vif-plugged-5186da63-8f63-49b5-8750-b04b51317122 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 852.322889] env[68217]: DEBUG oslo_concurrency.lockutils [req-c6de12f6-8ca8-46f2-88ea-1ca98afb94a8 
req-1835b412-428c-42f4-9b98-2555f0d19014 service nova] Acquiring lock "62628aed-e2f9-478f-bed7-00757fc3c484-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 852.323435] env[68217]: DEBUG oslo_concurrency.lockutils [req-c6de12f6-8ca8-46f2-88ea-1ca98afb94a8 req-1835b412-428c-42f4-9b98-2555f0d19014 service nova] Lock "62628aed-e2f9-478f-bed7-00757fc3c484-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 852.323503] env[68217]: DEBUG oslo_concurrency.lockutils [req-c6de12f6-8ca8-46f2-88ea-1ca98afb94a8 req-1835b412-428c-42f4-9b98-2555f0d19014 service nova] Lock "62628aed-e2f9-478f-bed7-00757fc3c484-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 852.323691] env[68217]: DEBUG nova.compute.manager [req-c6de12f6-8ca8-46f2-88ea-1ca98afb94a8 req-1835b412-428c-42f4-9b98-2555f0d19014 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] No waiting events found dispatching network-vif-plugged-5186da63-8f63-49b5-8750-b04b51317122 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 852.323915] env[68217]: WARNING nova.compute.manager [req-c6de12f6-8ca8-46f2-88ea-1ca98afb94a8 req-1835b412-428c-42f4-9b98-2555f0d19014 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Received unexpected event network-vif-plugged-5186da63-8f63-49b5-8750-b04b51317122 for instance with vm_state building and task_state spawning. [ 852.324339] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961325, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.73954} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.324639] env[68217]: INFO nova.virt.vmwareapi.ds_util [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 09290e60-7751-408e-9d6d-20e7cb61767b/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk. [ 852.325529] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0af016e1-1684-4f57-b1a9-43497697f3c7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.339192] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961329, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.361016] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 09290e60-7751-408e-9d6d-20e7cb61767b/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 852.361398] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-521a7de3-a19e-4eef-a461-5a85d173c69d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.383479] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 852.383479] env[68217]: value = "task-2961330" [ 852.383479] env[68217]: _type = "Task" [ 852.383479] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.394218] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961330, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.559558] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 852.560363] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ee2917-0537-4e89-b803-40eb81362ea3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.568772] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961328, 'name': CreateVM_Task, 'duration_secs': 0.534164} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.570789] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 852.571124] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 852.571806] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.571962] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.572289] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 852.572517] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee8a34a0-67d1-4da7-8a03-007edf080325 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.574181] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-430e82d8-9824-4be6-be54-bdb3332ca4c4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.578803] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Waiting for the task: (returnval){ [ 852.578803] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]520428b1-0eee-b121-49f6-94869b212525" [ 852.578803] env[68217]: _type = "Task" [ 852.578803] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.586416] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]520428b1-0eee-b121-49f6-94869b212525, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.646172] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 852.646427] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 852.646652] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Deleting the datastore file [datastore2] 58c15727-79ae-404f-a054-d71e3be498cc {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 852.647367] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b9c82479-b6c7-4f71-847b-2d884071d062 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.653580] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 852.653580] env[68217]: value = "task-2961332" [ 852.653580] env[68217]: _type = "Task" [ 852.653580] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.661564] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961332, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.706458] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 4f4dc254-8e4f-4c5f-a2a8-eef6230825c6] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 852.709465] env[68217]: DEBUG nova.compute.utils [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 852.714446] env[68217]: DEBUG nova.compute.manager [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 852.714446] env[68217]: DEBUG nova.network.neutron [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 852.755634] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "refresh_cache-09a8469d-567c-4247-96eb-edf0f4040f65" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.755762] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired lock "refresh_cache-09a8469d-567c-4247-96eb-edf0f4040f65" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 852.755913] env[68217]: DEBUG nova.network.neutron [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 852.789904] env[68217]: DEBUG nova.policy [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '554b6b3d22404c0ba52c739b3c7b98a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8fb62d18446841a3b2a6ac25ab5dc869', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 853.611311] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 471e8a27-ed87-461a-b817-cd5ad208dd10] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 853.613495] env[68217]: DEBUG nova.compute.manager [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 853.622371] env[68217]: DEBUG nova.compute.manager [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Received event network-changed-5186da63-8f63-49b5-8750-b04b51317122 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 853.622553] env[68217]: DEBUG nova.compute.manager [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Refreshing instance network info cache due to event network-changed-5186da63-8f63-49b5-8750-b04b51317122. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 853.622762] env[68217]: DEBUG oslo_concurrency.lockutils [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] Acquiring lock "refresh_cache-62628aed-e2f9-478f-bed7-00757fc3c484" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.622950] env[68217]: DEBUG oslo_concurrency.lockutils [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] Acquired lock "refresh_cache-62628aed-e2f9-478f-bed7-00757fc3c484" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.623163] env[68217]: DEBUG nova.network.neutron [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Refreshing network info cache for port 5186da63-8f63-49b5-8750-b04b51317122 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 853.648571] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]520428b1-0eee-b121-49f6-94869b212525, 'name': SearchDatastore_Task, 'duration_secs': 0.01462} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.658368] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 853.658614] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 853.659315] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.659315] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 853.659315] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 853.659464] env[68217]: DEBUG oslo_vmware.api [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961332, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.328675} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.660427] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961330, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.661074] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961329, 'name': ReconfigVM_Task, 'duration_secs': 0.671585} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.664246] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4129c081-d15b-41f7-b39b-8d5da992219a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.666204] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 853.666363] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 853.666578] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 853.668424] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Reconfigured VM instance instance-00000043 to attach disk [datastore2] 2612f6fc-a43f-4011-8a09-51088a49371a/2612f6fc-a43f-4011-8a09-51088a49371a.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 853.670253] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-648a0e70-e568-4317-bf54-a960a403af29 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.679887] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 853.679887] env[68217]: value = "task-2961333" [ 853.679887] env[68217]: _type = "Task" [ 853.679887] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.681957] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 853.682519] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 853.689028] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-51bc987b-c8ea-4d9c-a929-ff15a64ce040 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.699012] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961333, 'name': Rename_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.699681] env[68217]: INFO nova.scheduler.client.report [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Deleted allocations for instance 58c15727-79ae-404f-a054-d71e3be498cc [ 853.703268] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Waiting for the task: (returnval){ [ 853.703268] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]528dfaac-60ee-9cc8-f4f9-cab4598ff1c8" [ 853.703268] env[68217]: _type = "Task" [ 853.703268] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.715430] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528dfaac-60ee-9cc8-f4f9-cab4598ff1c8, 'name': SearchDatastore_Task} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.716227] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4837615-5fe4-495f-a238-d11b128dc000 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.721443] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Waiting for the task: (returnval){ [ 853.721443] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]526ca607-2a07-f5c6-61ee-e3b078f441b3" [ 853.721443] env[68217]: _type = "Task" [ 853.721443] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.725034] env[68217]: DEBUG nova.network.neutron [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 853.735327] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526ca607-2a07-f5c6-61ee-e3b078f441b3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.792799] env[68217]: DEBUG nova.network.neutron [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Successfully created port: f10ba1e7-ec20-4ece-a5e6-c0e47e42e986 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 853.964338] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4713084c-84bf-4c1c-b71c-6870d19f12c4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.973103] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d517f25-2a6a-472a-8416-22666145353d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.004730] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8115745b-12f5-4247-833e-e0730a7e0947 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.011996] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-616f5cc1-5809-4ac0-bfec-8bf44aa17a81 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.027224] env[68217]: DEBUG nova.compute.provider_tree [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.121052] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961330, 'name': ReconfigVM_Task, 'duration_secs': 1.389886} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.121362] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 09290e60-7751-408e-9d6d-20e7cb61767b/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 854.122241] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b27a5bd0-8b49-4ffe-b12a-25b24a97bb4c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.145539] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 71dd4921-5859-421f-9e31-e9800adc9e3c] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 854.154071] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d2abea3a-68dd-4344-a239-5196c52f2e46 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.170615] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 854.170615] env[68217]: value = "task-2961334" [ 854.170615] env[68217]: _type = "Task" [ 854.170615] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.181163] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961334, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.182192] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Acquiring lock "2e3dae16-dba3-4230-913d-7a5c3469e36e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.182395] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Lock "2e3dae16-dba3-4230-913d-7a5c3469e36e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.182577] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Acquiring lock "2e3dae16-dba3-4230-913d-7a5c3469e36e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.182742] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Lock "2e3dae16-dba3-4230-913d-7a5c3469e36e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.182893] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Lock "2e3dae16-dba3-4230-913d-7a5c3469e36e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.188241] env[68217]: INFO nova.compute.manager [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Terminating instance [ 854.197742] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961333, 'name': Rename_Task, 'duration_secs': 0.236266} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.198042] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 854.198321] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8353e76e-7341-4ee1-9636-fe65922e65c6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.206618] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 854.206618] env[68217]: value = "task-2961335" [ 854.206618] env[68217]: _type = "Task" [ 854.206618] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.207846] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.217320] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961335, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.236133] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526ca607-2a07-f5c6-61ee-e3b078f441b3, 'name': SearchDatastore_Task, 'duration_secs': 0.01218} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.236298] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.236587] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 62628aed-e2f9-478f-bed7-00757fc3c484/62628aed-e2f9-478f-bed7-00757fc3c484.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 854.236853] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-31776f61-07c7-49dc-b14f-1df09bbee1cc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.243548] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Waiting for the task: (returnval){ [ 854.243548] env[68217]: value = "task-2961336" [ 854.243548] env[68217]: _type = "Task" [ 854.243548] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.247566] env[68217]: DEBUG nova.network.neutron [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Updating instance_info_cache with network_info: [{"id": "ed3866d2-77b3-4f38-903b-76d8b6349fc8", "address": "fa:16:3e:a6:47:4c", "network": {"id": "1800eb48-065a-40ff-aa65-26727eaad0cb", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-419697147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9ba7843e6144cd1877b48bc40cd64f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped3866d2-77", "ovs_interfaceid": "ed3866d2-77b3-4f38-903b-76d8b6349fc8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.255477] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': task-2961336, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.278895] env[68217]: DEBUG nova.network.neutron [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Successfully updated port: 56c6f7cc-e41b-4494-84b7-8925f5d7e59d {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 854.528953] env[68217]: DEBUG nova.scheduler.client.report [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 854.655328] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 11f9c054-62b9-4ac9-9651-5c85e7a86663] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 854.659090] env[68217]: DEBUG nova.compute.manager [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 854.684626] env[68217]: DEBUG nova.compute.manager [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 854.690107] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caffbace-d26f-43ae-ab18-93b83738fb40 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.693515] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961334, 'name': ReconfigVM_Task, 'duration_secs': 0.215157} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.696316] env[68217]: DEBUG nova.virt.hardware [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 854.696669] env[68217]: DEBUG nova.virt.hardware [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 854.696806] env[68217]: DEBUG nova.virt.hardware [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 854.697013] env[68217]: DEBUG nova.virt.hardware [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 854.697172] env[68217]: DEBUG nova.virt.hardware [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 854.698928] env[68217]: DEBUG nova.virt.hardware [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 854.698928] env[68217]: DEBUG nova.virt.hardware [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 854.698928] env[68217]: DEBUG nova.virt.hardware [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 854.698928] env[68217]: DEBUG nova.virt.hardware [None 
req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 854.698928] env[68217]: DEBUG nova.virt.hardware [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 854.699423] env[68217]: DEBUG nova.virt.hardware [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 854.699423] env[68217]: DEBUG nova.compute.manager [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 854.699423] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 854.700091] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 854.703132] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d0d9bdc-9c7a-4ff5-ba72-bc329ef12f09 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.704855] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11eb403b-611b-44e7-b4b8-4eccdba6764a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.708854] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d8c767e6-fa5c-4b02-adc6-3e2d27565975 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.726892] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 854.729044] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1a056c50-517c-4262-a4c5-082b4a9d09e9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.731992] env[68217]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e717d4-019b-4f11-8d0d-a45f06f3bc9b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.737245] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 854.737245] env[68217]: value = "task-2961337" [ 854.737245] env[68217]: _type = "Task" [ 854.737245] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.742705] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961335, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.757461] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Releasing lock "refresh_cache-09a8469d-567c-4247-96eb-edf0f4040f65" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.757956] env[68217]: DEBUG nova.compute.manager [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Instance network_info: |[{"id": "ed3866d2-77b3-4f38-903b-76d8b6349fc8", "address": "fa:16:3e:a6:47:4c", "network": {"id": "1800eb48-065a-40ff-aa65-26727eaad0cb", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-419697147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9ba7843e6144cd1877b48bc40cd64f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped3866d2-77", "ovs_interfaceid": "ed3866d2-77b3-4f38-903b-76d8b6349fc8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 854.766196] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:47:4c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1323cb03-8367-485a-962e-131af8eba474', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ed3866d2-77b3-4f38-903b-76d8b6349fc8', 'vif_model': 'vmxnet3'}] {{(pid=68217) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 854.776512] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 854.777406] env[68217]: DEBUG oslo_vmware.api [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for the task: (returnval){ [ 854.777406] env[68217]: value = "task-2961338" [ 854.777406] env[68217]: _type = "Task" [ 854.777406] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.782175] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 854.783355] env[68217]: DEBUG oslo_concurrency.lockutils [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "refresh_cache-3af571ce-c400-45a1-97ad-4fbd53395129" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.783511] env[68217]: DEBUG oslo_concurrency.lockutils [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "refresh_cache-3af571ce-c400-45a1-97ad-4fbd53395129" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.783666] env[68217]: DEBUG nova.network.neutron [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 854.785131] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961337, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.786572] env[68217]: DEBUG nova.network.neutron [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Updated VIF entry in instance network info cache for port 5186da63-8f63-49b5-8750-b04b51317122. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 854.787094] env[68217]: DEBUG nova.network.neutron [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Updating instance_info_cache with network_info: [{"id": "5186da63-8f63-49b5-8750-b04b51317122", "address": "fa:16:3e:05:bd:f2", "network": {"id": "2046820f-fbc9-4e17-97f7-bb97bc53083f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-34123343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b359376e18b4c878d281b8fbec28f69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5186da63-8f", "ovs_interfaceid": "5186da63-8f63-49b5-8750-b04b51317122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.789146] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6f1c6be6-d99c-4a2b-8e2a-9774e3ee2a63 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.813108] env[68217]: DEBUG oslo_concurrency.lockutils [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] Releasing lock "refresh_cache-62628aed-e2f9-478f-bed7-00757fc3c484" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 854.813427] env[68217]: DEBUG nova.compute.manager [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Received event network-changed-6f6f347d-9ee4-4b18-9c77-67f67ee873e6 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 854.813593] env[68217]: DEBUG nova.compute.manager [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Refreshing instance network info cache due to event network-changed-6f6f347d-9ee4-4b18-9c77-67f67ee873e6. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 854.813848] env[68217]: DEBUG oslo_concurrency.lockutils [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] Acquiring lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.813990] env[68217]: DEBUG oslo_concurrency.lockutils [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] Acquired lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.814190] env[68217]: DEBUG nova.network.neutron [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Refreshing network info cache for port 6f6f347d-9ee4-4b18-9c77-67f67ee873e6 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 854.819298] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': task-2961336, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.828651] env[68217]: DEBUG oslo_vmware.api [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961338, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.830244] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 854.830244] env[68217]: value = "task-2961339" [ 854.830244] env[68217]: _type = "Task" [ 854.830244] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.841076] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961339, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.887052] env[68217]: DEBUG nova.compute.manager [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Received event network-vif-unplugged-686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 854.887334] env[68217]: DEBUG oslo_concurrency.lockutils [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] Acquiring lock "58c15727-79ae-404f-a054-d71e3be498cc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 854.887582] env[68217]: DEBUG oslo_concurrency.lockutils [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] Lock "58c15727-79ae-404f-a054-d71e3be498cc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 854.887776] env[68217]: DEBUG oslo_concurrency.lockutils [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] Lock "58c15727-79ae-404f-a054-d71e3be498cc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 854.888015] env[68217]: DEBUG nova.compute.manager [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] No waiting events found dispatching network-vif-unplugged-686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 854.888131] env[68217]: WARNING nova.compute.manager [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Received unexpected event network-vif-unplugged-686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 for instance with vm_state shelved_offloaded and task_state None. [ 854.888342] env[68217]: DEBUG nova.compute.manager [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Received event network-changed-686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 854.888556] env[68217]: DEBUG nova.compute.manager [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Refreshing instance network info cache due to event network-changed-686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 854.888814] env[68217]: DEBUG oslo_concurrency.lockutils [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] Acquiring lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.888983] env[68217]: DEBUG oslo_concurrency.lockutils [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] Acquired lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 854.889222] env[68217]: DEBUG nova.network.neutron [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Refreshing network info cache for port 686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 855.162112] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: cdc84742-e20a-4e48-bfff-b3ac34405c1d] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 855.221698] env[68217]: DEBUG oslo_vmware.api [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961335, 'name': PowerOnVM_Task, 'duration_secs': 0.602194} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.222050] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 855.222291] env[68217]: INFO nova.compute.manager [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Took 11.87 seconds to spawn the instance on the hypervisor. 
[ 855.222525] env[68217]: DEBUG nova.compute.manager [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 855.223924] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d737dfcb-f606-4a12-a124-774078ac0227 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.230020] env[68217]: INFO nova.compute.manager [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] instance snapshotting [ 855.237662] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f57df8-fbe3-4577-9267-57de7d6b95f8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.260219] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-724c942d-40cf-4275-a8a6-03cee53562ec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.271692] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961337, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.278581] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': task-2961336, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.663627} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.279967] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 62628aed-e2f9-478f-bed7-00757fc3c484/62628aed-e2f9-478f-bed7-00757fc3c484.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 855.280195] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 855.280696] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5bd2fa2a-4fd6-464e-8330-847d36defad3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.291548] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Waiting for the task: (returnval){ [ 855.291548] env[68217]: value = "task-2961340" [ 855.291548] env[68217]: _type = "Task" [ 855.291548] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.294503] env[68217]: DEBUG oslo_vmware.api [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961338, 'name': PowerOffVM_Task, 'duration_secs': 0.317668} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.297977] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 855.298079] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 855.298363] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-956d3a42-f3ca-46fa-9176-e1933856065f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.306838] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': task-2961340, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.342927] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961339, 'name': CreateVM_Task, 'duration_secs': 0.506679} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.342927] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 855.344071] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.344071] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.344178] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 855.345497] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-662555c2-7049-4ec3-807f-c19047bc93f8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.350021] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d 
tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 855.350021] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d30366-c993-aa23-e217-20450efc89c2" [ 855.350021] env[68217]: _type = "Task" [ 855.350021] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.358435] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d30366-c993-aa23-e217-20450efc89c2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.367537] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 855.367740] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 855.367957] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Deleting the datastore file [datastore1] 2e3dae16-dba3-4230-913d-7a5c3469e36e {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 855.368232] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7bc4572c-5980-4ec8-a1d6-70547d2f2180 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.374394] env[68217]: DEBUG oslo_vmware.api [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for the task: (returnval){ [ 855.374394] env[68217]: value = "task-2961342" [ 855.374394] env[68217]: _type = "Task" [ 855.374394] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.383496] env[68217]: DEBUG oslo_vmware.api [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961342, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.384411] env[68217]: DEBUG nova.network.neutron [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 855.539509] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 3.334s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 855.542666] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.086s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 855.543172] env[68217]: DEBUG nova.objects.instance [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lazy-loading 'resources' on Instance uuid 149bd497-4ee6-4ca2-9d18-b276e773aedf {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 855.665094] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 855.665379] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Cleaning up deleted instances with incomplete migration {{(pid=68217) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 855.753391] env[68217]: INFO nova.compute.manager [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Took 31.26 seconds to build instance. [ 855.758729] env[68217]: DEBUG oslo_vmware.api [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961337, 'name': PowerOnVM_Task, 'duration_secs': 0.566848} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.762824] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 855.765814] env[68217]: DEBUG nova.compute.manager [None req-ed8eb4d3-8ac0-49e1-85ba-1b0d8d9d352e tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 855.766851] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7c24cdd-2eb3-4529-935b-d668dce8f6ec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.781613] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 855.784017] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-53a53141-a82a-48a4-aaa2-d38aeb74ecfe {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.789645] env[68217]: DEBUG oslo_vmware.api [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 855.789645] env[68217]: value = "task-2961343" [ 855.789645] env[68217]: _type = "Task" [ 855.789645] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.800890] env[68217]: DEBUG oslo_vmware.api [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961343, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.809376] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': task-2961340, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068101} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.809376] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 855.809376] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c29d7874-dbdb-46c9-bc1f-dba0ba98cd5c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.832380] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Reconfiguring VM instance instance-00000044 to attach disk [datastore2] 62628aed-e2f9-478f-bed7-00757fc3c484/62628aed-e2f9-478f-bed7-00757fc3c484.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 855.833468] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-886f5d36-281d-4719-9f1f-85fe6ea8861a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.855477] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Waiting for the task: (returnval){ [ 855.855477] env[68217]: value = "task-2961344" [ 855.855477] env[68217]: _type = "Task" [ 855.855477] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.862787] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d30366-c993-aa23-e217-20450efc89c2, 'name': SearchDatastore_Task, 'duration_secs': 0.023231} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.863483] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 855.863747] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 855.864029] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.864189] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 855.864368] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 855.865096] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-295ac248-76fa-46ff-9bd9-6544c24c305b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.869827] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': task-2961344, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.876942] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 855.877249] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 855.881671] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f84050d-900a-4fd8-987b-691316bdc232 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.890961] env[68217]: DEBUG oslo_vmware.api [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Task: {'id': task-2961342, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.497607} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.892419] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 855.893198] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 855.893198] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 855.893198] env[68217]: INFO nova.compute.manager [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Took 1.19 seconds to destroy the instance on the hypervisor. [ 855.893198] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 855.893555] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 855.893555] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529e61e0-804d-8777-cca8-982bd8cd4c03" [ 855.893555] env[68217]: _type = "Task" [ 855.893555] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.893677] env[68217]: DEBUG nova.compute.manager [-] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 855.893793] env[68217]: DEBUG nova.network.neutron [-] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 855.903776] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529e61e0-804d-8777-cca8-982bd8cd4c03, 'name': SearchDatastore_Task, 'duration_secs': 0.009235} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.904559] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e1a2ac08-60c4-4a62-98c1-82ec29b0027e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.909511] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 855.909511] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52006a42-8b99-7792-f84b-de55df225fe9" [ 855.909511] env[68217]: _type = "Task" [ 855.909511] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.920579] env[68217]: DEBUG nova.network.neutron [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Updating instance_info_cache with network_info: [{"id": "56c6f7cc-e41b-4494-84b7-8925f5d7e59d", "address": "fa:16:3e:fc:01:dd", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56c6f7cc-e4", "ovs_interfaceid": "56c6f7cc-e41b-4494-84b7-8925f5d7e59d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.922227] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: 
{'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52006a42-8b99-7792-f84b-de55df225fe9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.084018] env[68217]: DEBUG nova.network.neutron [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Updated VIF entry in instance network info cache for port 6f6f347d-9ee4-4b18-9c77-67f67ee873e6. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 856.084018] env[68217]: DEBUG nova.network.neutron [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Updating instance_info_cache with network_info: [{"id": "6f6f347d-9ee4-4b18-9c77-67f67ee873e6", "address": "fa:16:3e:25:6f:1a", "network": {"id": "2a4968db-54c5-475c-9fc7-56d5f6441110", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-553739001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "a6348b1f20794ee9a016d409eee576e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6f6f347d-9e", "ovs_interfaceid": "6f6f347d-9ee4-4b18-9c77-67f67ee873e6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.121216] env[68217]: INFO nova.scheduler.client.report [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Deleted allocation for migration e43f38fa-6f4b-4977-b7ad-6a2e6c8e9fb3 [ 856.167583] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 856.175261] env[68217]: DEBUG nova.network.neutron [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Updated VIF entry in instance network info cache for port 686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 856.175649] env[68217]: DEBUG nova.network.neutron [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Updating instance_info_cache with network_info: [{"id": "686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0", "address": "fa:16:3e:2c:18:00", "network": {"id": "2456ed77-d69f-4430-b649-cebfb2e6e5c6", "bridge": null, "label": "tempest-ServersNegativeTestJSON-259900287-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "023b801c234d47d79cb57ea73058e81c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap686a0657-d9", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.255532] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca794bb1-eeca-4929-a2f0-af3163b7360a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "2612f6fc-a43f-4011-8a09-51088a49371a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 32.769s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.305682] env[68217]: DEBUG oslo_vmware.api [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961343, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.367236] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': task-2961344, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.373288] env[68217]: DEBUG nova.compute.manager [req-da4a8465-9db0-4159-b7e3-e3893092ada4 req-382854e6-d31a-4c61-88eb-228bfaea571f service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Received event network-vif-deleted-6f6f347d-9ee4-4b18-9c77-67f67ee873e6 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 856.373441] env[68217]: INFO nova.compute.manager [req-da4a8465-9db0-4159-b7e3-e3893092ada4 req-382854e6-d31a-4c61-88eb-228bfaea571f service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Neutron deleted interface 6f6f347d-9ee4-4b18-9c77-67f67ee873e6; detaching it from the instance and deleting it from the info cache [ 856.373674] env[68217]: DEBUG nova.network.neutron [req-da4a8465-9db0-4159-b7e3-e3893092ada4 req-382854e6-d31a-4c61-88eb-228bfaea571f service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.422513] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52006a42-8b99-7792-f84b-de55df225fe9, 'name': SearchDatastore_Task, 'duration_secs': 0.008814} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.423784] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.424097] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 09a8469d-567c-4247-96eb-edf0f4040f65/09a8469d-567c-4247-96eb-edf0f4040f65.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 856.426157] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b528a0e2-0e8c-4589-ba47-648e66bcc1b1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.430451] env[68217]: DEBUG oslo_concurrency.lockutils [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "refresh_cache-3af571ce-c400-45a1-97ad-4fbd53395129" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.430726] env[68217]: DEBUG nova.compute.manager [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Instance network_info: |[{"id": "56c6f7cc-e41b-4494-84b7-8925f5d7e59d", "address": "fa:16:3e:fc:01:dd", "network": {"id": 
"dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56c6f7cc-e4", "ovs_interfaceid": "56c6f7cc-e41b-4494-84b7-8925f5d7e59d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 856.434261] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fc:01:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '56c6f7cc-e41b-4494-84b7-8925f5d7e59d', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 856.442499] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Creating folder: Project (0d6233e9874c41329f81c990f8bc72b1). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 856.444777] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-353fddfb-dc36-4633-8a39-015f8b916f41 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.449374] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 856.449374] env[68217]: value = "task-2961345" [ 856.449374] env[68217]: _type = "Task" [ 856.449374] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.455697] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Created folder: Project (0d6233e9874c41329f81c990f8bc72b1) in parent group-v594094. [ 856.455808] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Creating folder: Instances. Parent ref: group-v594295. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 856.457224] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-17ef15f2-e804-435a-8dbe-962176268953 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.465757] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961345, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.468603] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Created folder: Instances in parent group-v594295. [ 856.468870] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 856.469082] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 856.469327] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1ace274a-242a-4b47-bd5d-6eb42bd16341 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.485759] env[68217]: DEBUG nova.network.neutron [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Successfully updated port: f10ba1e7-ec20-4ece-a5e6-c0e47e42e986 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 856.499016] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 856.499016] env[68217]: value = "task-2961348" [ 856.499016] env[68217]: _type = "Task" [ 856.499016] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.503967] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961348, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.549977] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84d1e1c7-6c8e-441a-a238-6ae18b42ee80 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.559489] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b80bbca-1891-49a4-b9d5-73079b4f8af4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.596650] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "58c15727-79ae-404f-a054-d71e3be498cc" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.597544] env[68217]: DEBUG oslo_concurrency.lockutils [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] Releasing lock "refresh_cache-2e3dae16-dba3-4230-913d-7a5c3469e36e" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.597780] env[68217]: DEBUG nova.compute.manager [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Received event network-vif-plugged-ed3866d2-77b3-4f38-903b-76d8b6349fc8 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 856.597965] env[68217]: DEBUG oslo_concurrency.lockutils [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] Acquiring lock "09a8469d-567c-4247-96eb-edf0f4040f65-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.598174] env[68217]: DEBUG oslo_concurrency.lockutils [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] Lock "09a8469d-567c-4247-96eb-edf0f4040f65-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.598390] env[68217]: DEBUG oslo_concurrency.lockutils [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] Lock "09a8469d-567c-4247-96eb-edf0f4040f65-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.598590] env[68217]: DEBUG nova.compute.manager [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] No waiting events found dispatching network-vif-plugged-ed3866d2-77b3-4f38-903b-76d8b6349fc8 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 856.598759] env[68217]: WARNING nova.compute.manager [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] [instance: 
09a8469d-567c-4247-96eb-edf0f4040f65] Received unexpected event network-vif-plugged-ed3866d2-77b3-4f38-903b-76d8b6349fc8 for instance with vm_state building and task_state spawning. [ 856.598965] env[68217]: DEBUG nova.compute.manager [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Received event network-changed-ed3866d2-77b3-4f38-903b-76d8b6349fc8 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 856.599191] env[68217]: DEBUG nova.compute.manager [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Refreshing instance network info cache due to event network-changed-ed3866d2-77b3-4f38-903b-76d8b6349fc8. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 856.599404] env[68217]: DEBUG oslo_concurrency.lockutils [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] Acquiring lock "refresh_cache-09a8469d-567c-4247-96eb-edf0f4040f65" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.599556] env[68217]: DEBUG oslo_concurrency.lockutils [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] Acquired lock "refresh_cache-09a8469d-567c-4247-96eb-edf0f4040f65" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.599751] env[68217]: DEBUG nova.network.neutron [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Refreshing network info cache for port ed3866d2-77b3-4f38-903b-76d8b6349fc8 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 856.602848] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a56e6d2-5ec1-40f6-828b-14e3646dfc92 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.612507] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33e22b78-fd49-40b4-a278-2d32bed02300 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.631270] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9dee5d5f-a84c-4146-a8fd-d3f7e35b9f5d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "bd62c682-24f2-4559-887a-03186409f699" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 13.835s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.632501] env[68217]: DEBUG nova.compute.provider_tree [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 856.679537] env[68217]: DEBUG oslo_concurrency.lockutils [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] Releasing lock 
"refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 856.679878] env[68217]: DEBUG nova.compute.manager [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Received event network-vif-plugged-56c6f7cc-e41b-4494-84b7-8925f5d7e59d {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 856.680068] env[68217]: DEBUG oslo_concurrency.lockutils [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] Acquiring lock "3af571ce-c400-45a1-97ad-4fbd53395129-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.680175] env[68217]: DEBUG oslo_concurrency.lockutils [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] Lock "3af571ce-c400-45a1-97ad-4fbd53395129-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.680335] env[68217]: DEBUG oslo_concurrency.lockutils [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] Lock "3af571ce-c400-45a1-97ad-4fbd53395129-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.680497] env[68217]: DEBUG nova.compute.manager [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] No waiting events found dispatching network-vif-plugged-56c6f7cc-e41b-4494-84b7-8925f5d7e59d {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 856.680659] env[68217]: WARNING nova.compute.manager [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Received unexpected event network-vif-plugged-56c6f7cc-e41b-4494-84b7-8925f5d7e59d for instance with vm_state building and task_state spawning. [ 856.680818] env[68217]: DEBUG nova.compute.manager [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Received event network-changed-56c6f7cc-e41b-4494-84b7-8925f5d7e59d {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 856.681052] env[68217]: DEBUG nova.compute.manager [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Refreshing instance network info cache due to event network-changed-56c6f7cc-e41b-4494-84b7-8925f5d7e59d. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 856.681249] env[68217]: DEBUG oslo_concurrency.lockutils [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] Acquiring lock "refresh_cache-3af571ce-c400-45a1-97ad-4fbd53395129" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.681383] env[68217]: DEBUG oslo_concurrency.lockutils [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] Acquired lock "refresh_cache-3af571ce-c400-45a1-97ad-4fbd53395129" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.681538] env[68217]: DEBUG nova.network.neutron [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Refreshing network info cache for port 56c6f7cc-e41b-4494-84b7-8925f5d7e59d {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 856.716567] env[68217]: DEBUG nova.network.neutron [-] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.807307] env[68217]: DEBUG oslo_vmware.api [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961343, 'name': CreateSnapshot_Task, 'duration_secs': 0.861197} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.807649] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 856.808562] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7a0ab8-7382-4347-8fdf-470b4e7e32cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.870589] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': task-2961344, 'name': ReconfigVM_Task, 'duration_secs': 0.60921} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.871041] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Reconfigured VM instance instance-00000044 to attach disk [datastore2] 62628aed-e2f9-478f-bed7-00757fc3c484/62628aed-e2f9-478f-bed7-00757fc3c484.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 856.871919] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f2f2cfdc-3b57-4197-a6e1-9bc16a7c4c94 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.880861] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Waiting for the task: (returnval){ [ 856.880861] env[68217]: value = "task-2961349" [ 856.880861] env[68217]: _type = "Task" [ 856.880861] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.881197] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-86238eb0-b2e2-4416-9f0e-1043b5c70526 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.893801] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': task-2961349, 'name': Rename_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.897723] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a51d53d4-4da3-44fa-912c-a7db39c8b1f0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.941572] env[68217]: INFO nova.compute.manager [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Unrescuing [ 856.941936] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "refresh_cache-09290e60-7751-408e-9d6d-20e7cb61767b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.942158] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquired lock "refresh_cache-09290e60-7751-408e-9d6d-20e7cb61767b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.942350] env[68217]: DEBUG nova.network.neutron [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 856.944194] env[68217]: DEBUG nova.compute.manager [req-da4a8465-9db0-4159-b7e3-e3893092ada4 req-382854e6-d31a-4c61-88eb-228bfaea571f service nova] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Detach interface failed, port_id=6f6f347d-9ee4-4b18-9c77-67f67ee873e6, reason: Instance 2e3dae16-dba3-4230-913d-7a5c3469e36e could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 856.964279] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961345, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.982204] env[68217]: DEBUG nova.compute.manager [req-ea046136-b20a-4745-9c13-6d8dde612808 req-59135eda-bbb3-4d80-9997-7af464788c31 service nova] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Received event network-vif-plugged-f10ba1e7-ec20-4ece-a5e6-c0e47e42e986 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 856.982636] env[68217]: DEBUG oslo_concurrency.lockutils [req-ea046136-b20a-4745-9c13-6d8dde612808 req-59135eda-bbb3-4d80-9997-7af464788c31 service nova] Acquiring lock "03d61c68-1b37-4172-b276-67a73a0dc228-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 856.982910] env[68217]: DEBUG oslo_concurrency.lockutils [req-ea046136-b20a-4745-9c13-6d8dde612808 req-59135eda-bbb3-4d80-9997-7af464788c31 service nova] Lock "03d61c68-1b37-4172-b276-67a73a0dc228-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 856.983188] env[68217]: DEBUG oslo_concurrency.lockutils [req-ea046136-b20a-4745-9c13-6d8dde612808 req-59135eda-bbb3-4d80-9997-7af464788c31 service nova] Lock "03d61c68-1b37-4172-b276-67a73a0dc228-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 856.983419] env[68217]: DEBUG nova.compute.manager [req-ea046136-b20a-4745-9c13-6d8dde612808 req-59135eda-bbb3-4d80-9997-7af464788c31 service nova] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] No waiting events found dispatching network-vif-plugged-f10ba1e7-ec20-4ece-a5e6-c0e47e42e986 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 856.983656] env[68217]: WARNING nova.compute.manager [req-ea046136-b20a-4745-9c13-6d8dde612808 req-59135eda-bbb3-4d80-9997-7af464788c31 service nova] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Received unexpected event network-vif-plugged-f10ba1e7-ec20-4ece-a5e6-c0e47e42e986 for instance with vm_state building and task_state spawning. [ 856.983866] env[68217]: DEBUG nova.compute.manager [req-ea046136-b20a-4745-9c13-6d8dde612808 req-59135eda-bbb3-4d80-9997-7af464788c31 service nova] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Received event network-changed-f10ba1e7-ec20-4ece-a5e6-c0e47e42e986 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 856.984144] env[68217]: DEBUG nova.compute.manager [req-ea046136-b20a-4745-9c13-6d8dde612808 req-59135eda-bbb3-4d80-9997-7af464788c31 service nova] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Refreshing instance network info cache due to event network-changed-f10ba1e7-ec20-4ece-a5e6-c0e47e42e986. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 856.984376] env[68217]: DEBUG oslo_concurrency.lockutils [req-ea046136-b20a-4745-9c13-6d8dde612808 req-59135eda-bbb3-4d80-9997-7af464788c31 service nova] Acquiring lock "refresh_cache-03d61c68-1b37-4172-b276-67a73a0dc228" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 856.984535] env[68217]: DEBUG oslo_concurrency.lockutils [req-ea046136-b20a-4745-9c13-6d8dde612808 req-59135eda-bbb3-4d80-9997-7af464788c31 service nova] Acquired lock "refresh_cache-03d61c68-1b37-4172-b276-67a73a0dc228" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 856.984704] env[68217]: DEBUG nova.network.neutron [req-ea046136-b20a-4745-9c13-6d8dde612808 req-59135eda-bbb3-4d80-9997-7af464788c31 service nova] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Refreshing network info cache for port f10ba1e7-ec20-4ece-a5e6-c0e47e42e986 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 856.991609] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "refresh_cache-03d61c68-1b37-4172-b276-67a73a0dc228" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.009040] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961348, 'name': CreateVM_Task, 'duration_secs': 0.496773} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.009252] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 857.010016] env[68217]: DEBUG oslo_concurrency.lockutils [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.010222] env[68217]: DEBUG oslo_concurrency.lockutils [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 857.010563] env[68217]: DEBUG oslo_concurrency.lockutils [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 857.010867] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f9e8f01a-7c59-4366-8ae6-7c8c51ddadc0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.016167] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 
tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 857.016167] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5209cc44-ee79-f608-f6d6-3c98a74b4ad2" [ 857.016167] env[68217]: _type = "Task" [ 857.016167] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.027256] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5209cc44-ee79-f608-f6d6-3c98a74b4ad2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.135825] env[68217]: DEBUG nova.scheduler.client.report [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 857.219242] env[68217]: INFO nova.compute.manager [-] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Took 1.33 seconds to deallocate network for instance. [ 857.328130] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 857.331033] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-629298f9-b52f-41ab-9ccd-7ae4be31fb0f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.340836] env[68217]: DEBUG oslo_vmware.api [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 857.340836] env[68217]: value = "task-2961350" [ 857.340836] env[68217]: _type = "Task" [ 857.340836] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.350531] env[68217]: DEBUG oslo_vmware.api [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961350, 'name': CloneVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.397872] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': task-2961349, 'name': Rename_Task, 'duration_secs': 0.193638} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.399403] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 857.399760] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7da99d32-1858-498c-9081-96acd86ba139 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.409065] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Waiting for the task: (returnval){ [ 857.409065] env[68217]: value = "task-2961351" [ 857.409065] env[68217]: _type = "Task" [ 857.409065] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.417768] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': task-2961351, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.462250] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961345, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.709381} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.462554] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 09a8469d-567c-4247-96eb-edf0f4040f65/09a8469d-567c-4247-96eb-edf0f4040f65.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 857.462792] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 857.463118] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e3dbc1d1-1baf-4f27-b9a3-80aab1388b47 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.471897] env[68217]: DEBUG nova.network.neutron [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Updated VIF entry in instance network info cache for port ed3866d2-77b3-4f38-903b-76d8b6349fc8. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 857.472253] env[68217]: DEBUG nova.network.neutron [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Updating instance_info_cache with network_info: [{"id": "ed3866d2-77b3-4f38-903b-76d8b6349fc8", "address": "fa:16:3e:a6:47:4c", "network": {"id": "1800eb48-065a-40ff-aa65-26727eaad0cb", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-419697147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9ba7843e6144cd1877b48bc40cd64f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "taped3866d2-77", "ovs_interfaceid": "ed3866d2-77b3-4f38-903b-76d8b6349fc8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.474956] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 857.474956] env[68217]: value = "task-2961352" [ 857.474956] env[68217]: _type = "Task" [ 857.474956] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.490110] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961352, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.529281] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5209cc44-ee79-f608-f6d6-3c98a74b4ad2, 'name': SearchDatastore_Task, 'duration_secs': 0.059269} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.529648] env[68217]: DEBUG oslo_concurrency.lockutils [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 857.529940] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 857.530204] env[68217]: DEBUG oslo_concurrency.lockutils [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 857.530375] env[68217]: DEBUG oslo_concurrency.lockutils [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 857.530568] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 857.531358] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9fe69784-eb8e-4df1-82d9-570f8cf13e11 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.540468] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 857.540733] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 857.542394] env[68217]: DEBUG nova.network.neutron [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Updated VIF entry in instance network info cache for port 56c6f7cc-e41b-4494-84b7-8925f5d7e59d. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 857.542757] env[68217]: DEBUG nova.network.neutron [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Updating instance_info_cache with network_info: [{"id": "56c6f7cc-e41b-4494-84b7-8925f5d7e59d", "address": "fa:16:3e:fc:01:dd", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap56c6f7cc-e4", "ovs_interfaceid": "56c6f7cc-e41b-4494-84b7-8925f5d7e59d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.544128] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8de697d5-bd1d-4e68-bb6e-0aa368979290 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.551506] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 857.551506] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529f432b-8d58-ee4b-298b-294a31b31cfe" [ 857.551506] env[68217]: _type = "Task" [ 857.551506] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.562418] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529f432b-8d58-ee4b-298b-294a31b31cfe, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.641165] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.098s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 857.643799] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.436s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 857.644152] env[68217]: DEBUG nova.objects.instance [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lazy-loading 'resources' on Instance uuid 58c15727-79ae-404f-a054-d71e3be498cc {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 857.671490] env[68217]: INFO nova.scheduler.client.report [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Deleted allocations for instance 149bd497-4ee6-4ca2-9d18-b276e773aedf [ 857.726021] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 857.840649] env[68217]: DEBUG nova.network.neutron [req-ea046136-b20a-4745-9c13-6d8dde612808 req-59135eda-bbb3-4d80-9997-7af464788c31 service nova] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 857.855994] env[68217]: DEBUG oslo_vmware.api [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961350, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.924356] env[68217]: DEBUG oslo_vmware.api [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': task-2961351, 'name': PowerOnVM_Task, 'duration_secs': 0.502802} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.924766] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 857.925098] env[68217]: INFO nova.compute.manager [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Took 12.05 seconds to spawn the instance on the hypervisor. [ 857.925632] env[68217]: DEBUG nova.compute.manager [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 857.927158] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222e541d-8773-4faf-8aec-4fc070faab4a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.976017] env[68217]: DEBUG oslo_concurrency.lockutils [req-42313fe6-ebe3-4314-a8ee-6084103854d0 req-e01c0eeb-82f7-450e-a541-5542b8634497 service nova] Releasing lock "refresh_cache-09a8469d-567c-4247-96eb-edf0f4040f65" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 857.989072] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961352, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092052} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.989072] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 857.989072] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-321ca077-dc0d-4dc4-8f2b-392bb855e5d8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.017030] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Reconfiguring VM instance instance-00000046 to attach disk [datastore1] 09a8469d-567c-4247-96eb-edf0f4040f65/09a8469d-567c-4247-96eb-edf0f4040f65.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 858.019971] env[68217]: DEBUG nova.network.neutron [req-ea046136-b20a-4745-9c13-6d8dde612808 req-59135eda-bbb3-4d80-9997-7af464788c31 service nova] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.022621] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b864c2ee-9e96-40da-81b4-c79bbce645b5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.047401] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 858.047401] env[68217]: value = "task-2961353" [ 858.047401] env[68217]: _type = "Task" [ 858.047401] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.048424] env[68217]: DEBUG oslo_concurrency.lockutils [req-d4bd8d8e-986e-40a4-959f-22f59171127b req-28a3540c-5dcd-46f5-9aff-7de404c9ecff service nova] Releasing lock "refresh_cache-3af571ce-c400-45a1-97ad-4fbd53395129" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.062504] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961353, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.066549] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529f432b-8d58-ee4b-298b-294a31b31cfe, 'name': SearchDatastore_Task, 'duration_secs': 0.011981} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.067358] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a07ce22-430d-478e-b78a-bb0d5a5fd6de {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.073914] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 858.073914] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52685e8a-a97b-8f87-9c99-974b54e4cdaf" [ 858.073914] env[68217]: _type = "Task" [ 858.073914] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.082700] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52685e8a-a97b-8f87-9c99-974b54e4cdaf, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.147107] env[68217]: DEBUG nova.objects.instance [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lazy-loading 'numa_topology' on Instance uuid 58c15727-79ae-404f-a054-d71e3be498cc {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 858.183385] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b916a3e6-6eec-4f3a-970c-930935b81801 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "149bd497-4ee6-4ca2-9d18-b276e773aedf" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.902s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 858.193079] env[68217]: DEBUG nova.network.neutron [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Updating instance_info_cache with network_info: [{"id": "1eb632e3-fe01-4d72-a4ea-834af75497ef", "address": "fa:16:3e:ec:b6:8c", "network": {"id": "9b382d3b-5356-4cee-b6ae-e9a825915fe9", "bridge": "br-int", "label": "tempest-ServerRescueTestJSON-894310318-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "bde4bb32b82948dd991d1fb8890c991b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4162774e-ec80-4d85-aeb4-fae77f197393", "external-id": "nsx-vlan-transportzone-542", "segmentation_id": 542, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1eb632e3-fe", "ovs_interfaceid": "1eb632e3-fe01-4d72-a4ea-834af75497ef", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": 
false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.356167] env[68217]: DEBUG oslo_vmware.api [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961350, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.455213] env[68217]: INFO nova.compute.manager [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Took 22.07 seconds to build instance. [ 858.541832] env[68217]: DEBUG oslo_concurrency.lockutils [req-ea046136-b20a-4745-9c13-6d8dde612808 req-59135eda-bbb3-4d80-9997-7af464788c31 service nova] Releasing lock "refresh_cache-03d61c68-1b37-4172-b276-67a73a0dc228" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.543048] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "refresh_cache-03d61c68-1b37-4172-b276-67a73a0dc228" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 858.543048] env[68217]: DEBUG nova.network.neutron [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 858.559428] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961353, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.584631] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52685e8a-a97b-8f87-9c99-974b54e4cdaf, 'name': SearchDatastore_Task, 'duration_secs': 0.018202} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.584878] env[68217]: DEBUG oslo_concurrency.lockutils [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.585215] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 3af571ce-c400-45a1-97ad-4fbd53395129/3af571ce-c400-45a1-97ad-4fbd53395129.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 858.585511] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d8f9c84c-a95b-4f4e-a604-c802740a04d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.592713] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 858.592713] env[68217]: value = "task-2961354" [ 858.592713] env[68217]: _type = "Task" [ 858.592713] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.601682] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961354, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.649371] env[68217]: DEBUG nova.objects.base [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Object Instance<58c15727-79ae-404f-a054-d71e3be498cc> lazy-loaded attributes: resources,numa_topology {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 858.696483] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Releasing lock "refresh_cache-09290e60-7751-408e-9d6d-20e7cb61767b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 858.696998] env[68217]: DEBUG nova.objects.instance [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lazy-loading 'flavor' on Instance uuid 09290e60-7751-408e-9d6d-20e7cb61767b {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 858.860282] env[68217]: DEBUG oslo_vmware.api [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961350, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.958600] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e71e42e0-091d-4b7b-bd67-5266e5f51ae2 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Lock "62628aed-e2f9-478f-bed7-00757fc3c484" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.586s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 859.066561] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961353, 'name': ReconfigVM_Task, 'duration_secs': 0.618363} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.067178] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Reconfigured VM instance instance-00000046 to attach disk [datastore1] 09a8469d-567c-4247-96eb-edf0f4040f65/09a8469d-567c-4247-96eb-edf0f4040f65.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 859.069169] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-87f58184-f1e1-4639-a77b-4b3b02bea069 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.076659] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 859.076659] env[68217]: value = "task-2961355" [ 859.076659] env[68217]: _type = "Task" [ 859.076659] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.090042] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961355, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.108164] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961354, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.108164] env[68217]: DEBUG nova.network.neutron [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 859.137740] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3b9226c-8b2d-4337-9c1d-a4607e888a7e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.151466] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-584c76ad-54cf-4bc9-9808-522ade9ec937 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.203245] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12a07d3-5492-4883-a7ef-ca2b64233757 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.213592] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "a513976b-4859-4822-8989-c9452db62ee6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.213875] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "a513976b-4859-4822-8989-c9452db62ee6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 859.215650] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-322f5a0b-c3b8-4743-8aaf-6f76594ab0f3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.244210] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eee88781-3e5a-44a3-9bfb-f96cd8457e1c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.248976] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 859.249610] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1034ac54-37a6-4c26-89c8-c45b8de6dd2f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.261872] env[68217]: DEBUG nova.compute.provider_tree [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 859.264572] env[68217]: DEBUG oslo_vmware.api [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 
tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 859.264572] env[68217]: value = "task-2961356" [ 859.264572] env[68217]: _type = "Task" [ 859.264572] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.273811] env[68217]: DEBUG oslo_vmware.api [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961356, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.339420] env[68217]: DEBUG nova.compute.manager [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Stashing vm_state: active {{(pid=68217) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 859.357477] env[68217]: DEBUG oslo_vmware.api [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961350, 'name': CloneVM_Task, 'duration_secs': 1.681951} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.357816] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Created linked-clone VM from snapshot [ 859.358604] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb0af51-39d3-41da-be55-7721f3c762b7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.367923] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Uploading image 9f4edacb-625d-403d-beb9-916f1ffd1cd7 {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 859.391820] env[68217]: DEBUG oslo_vmware.rw_handles [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 859.391820] env[68217]: value = "vm-594299" [ 859.391820] env[68217]: _type = "VirtualMachine" [ 859.391820] env[68217]: }. 
{{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 859.392499] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-632fb6c0-b188-4f7f-8f52-2f3bcc3580ad {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.399870] env[68217]: DEBUG oslo_vmware.rw_handles [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lease: (returnval){ [ 859.399870] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529bbe2e-74de-4cdd-fd1b-97f9962843c6" [ 859.399870] env[68217]: _type = "HttpNfcLease" [ 859.399870] env[68217]: } obtained for exporting VM: (result){ [ 859.399870] env[68217]: value = "vm-594299" [ 859.399870] env[68217]: _type = "VirtualMachine" [ 859.399870] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 859.400127] env[68217]: DEBUG oslo_vmware.api [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the lease: (returnval){ [ 859.400127] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529bbe2e-74de-4cdd-fd1b-97f9962843c6" [ 859.400127] env[68217]: _type = "HttpNfcLease" [ 859.400127] env[68217]: } to be ready. {{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 859.406824] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 859.406824] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529bbe2e-74de-4cdd-fd1b-97f9962843c6" [ 859.406824] env[68217]: _type = "HttpNfcLease" [ 859.406824] env[68217]: } is initializing. 
{{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 859.431997] env[68217]: DEBUG nova.network.neutron [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Updating instance_info_cache with network_info: [{"id": "f10ba1e7-ec20-4ece-a5e6-c0e47e42e986", "address": "fa:16:3e:bd:30:8a", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf10ba1e7-ec", "ovs_interfaceid": "f10ba1e7-ec20-4ece-a5e6-c0e47e42e986", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.586942] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961355, 'name': Rename_Task, 'duration_secs': 0.290907} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.587579] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 859.587886] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-32c1fc8f-d218-42d2-9959-7474b8a7019a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.599798] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 859.599798] env[68217]: value = "task-2961358" [ 859.599798] env[68217]: _type = "Task" [ 859.599798] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.611184] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961354, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.671127} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.612299] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 3af571ce-c400-45a1-97ad-4fbd53395129/3af571ce-c400-45a1-97ad-4fbd53395129.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 859.613126] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 859.613126] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-510cb092-1095-4676-a572-9c99d3cf0250 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.622175] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961358, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.627571] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 859.627571] env[68217]: value = "task-2961359" [ 859.627571] env[68217]: _type = "Task" [ 859.627571] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.640096] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961359, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.723028] env[68217]: DEBUG nova.compute.manager [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 859.773095] env[68217]: DEBUG nova.scheduler.client.report [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 859.779391] env[68217]: DEBUG oslo_vmware.api [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961356, 'name': PowerOffVM_Task, 'duration_secs': 0.397889} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.779649] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 859.785163] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Reconfiguring VM instance instance-00000041 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 859.786009] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-75304c0f-5dd2-4d12-8de7-972fa2102495 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.804794] env[68217]: DEBUG oslo_vmware.api [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 859.804794] env[68217]: value = "task-2961360" [ 859.804794] env[68217]: _type = "Task" [ 859.804794] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.813327] env[68217]: DEBUG oslo_vmware.api [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961360, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.859173] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 859.879238] env[68217]: DEBUG oslo_vmware.rw_handles [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ade483-ce28-afa1-dff0-10bbd7c1f7c2/disk-0.vmdk. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 859.880204] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84e179c1-1dc3-467b-9a36-10d6cb24ad99 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.886337] env[68217]: DEBUG oslo_vmware.rw_handles [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ade483-ce28-afa1-dff0-10bbd7c1f7c2/disk-0.vmdk is in state: ready. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 859.886499] env[68217]: ERROR oslo_vmware.rw_handles [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ade483-ce28-afa1-dff0-10bbd7c1f7c2/disk-0.vmdk due to incomplete transfer. [ 859.886718] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-fbf07d6d-0db7-4287-9e05-6d9dee5b2374 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.893436] env[68217]: DEBUG oslo_vmware.rw_handles [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52ade483-ce28-afa1-dff0-10bbd7c1f7c2/disk-0.vmdk. 
{{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 859.893671] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Uploaded image 73846363-8ec9-4ba7-8de7-f03414dbfcf7 to the Glance image server {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 859.896224] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 859.896466] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2e0c0b6a-e666-436b-9eec-404b02e83349 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.905344] env[68217]: DEBUG oslo_vmware.api [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 859.905344] env[68217]: value = "task-2961361" [ 859.905344] env[68217]: _type = "Task" [ 859.905344] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 859.910072] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 859.910072] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529bbe2e-74de-4cdd-fd1b-97f9962843c6" [ 859.910072] env[68217]: _type = "HttpNfcLease" [ 859.910072] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 859.910705] env[68217]: DEBUG oslo_vmware.rw_handles [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 859.910705] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529bbe2e-74de-4cdd-fd1b-97f9962843c6" [ 859.910705] env[68217]: _type = "HttpNfcLease" [ 859.910705] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 859.911546] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4358cf0-c760-4827-a6ab-2d3c814daaa9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.916869] env[68217]: DEBUG oslo_vmware.api [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961361, 'name': Destroy_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.921670] env[68217]: DEBUG oslo_vmware.rw_handles [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52593c63-4ccf-b90f-c363-528d04c5f1c6/disk-0.vmdk from lease info. 
{{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 859.921842] env[68217]: DEBUG oslo_vmware.rw_handles [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52593c63-4ccf-b90f-c363-528d04c5f1c6/disk-0.vmdk for reading. {{(pid=68217) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 859.979065] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "refresh_cache-03d61c68-1b37-4172-b276-67a73a0dc228" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 859.979397] env[68217]: DEBUG nova.compute.manager [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Instance network_info: |[{"id": "f10ba1e7-ec20-4ece-a5e6-c0e47e42e986", "address": "fa:16:3e:bd:30:8a", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf10ba1e7-ec", "ovs_interfaceid": "f10ba1e7-ec20-4ece-a5e6-c0e47e42e986", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 859.979978] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bd:30:8a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '02bbcead-d833-4543-bec6-fb82dfe659ff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f10ba1e7-ec20-4ece-a5e6-c0e47e42e986', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 859.987687] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 859.989114] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 859.989366] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe82ea38-8f4d-4756-9c0b-7e63a4f9313e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.009764] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 860.009764] env[68217]: value = "task-2961362" [ 860.009764] env[68217]: _type = "Task" [ 860.009764] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.018437] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961362, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.110718] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961358, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.117099] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3fa4d154-1360-45d3-b988-baf406891ea5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.127699] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.127699] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.137703] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961359, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.084046} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.138567] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 860.139536] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8cef138-343f-4b58-9286-4c31418962a9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.168040] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Reconfiguring VM instance instance-00000047 to attach disk [datastore1] 3af571ce-c400-45a1-97ad-4fbd53395129/3af571ce-c400-45a1-97ad-4fbd53395129.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 860.175949] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8ec4ddb-1c7e-4824-8ce1-311812ab17bc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.200385] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 860.200385] env[68217]: value = "task-2961363" [ 860.200385] env[68217]: _type = "Task" [ 860.200385] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.209913] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961363, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.252697] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 860.276730] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.632s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.278017] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.552s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.282023] env[68217]: DEBUG nova.objects.instance [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Lazy-loading 'resources' on Instance uuid 2e3dae16-dba3-4230-913d-7a5c3469e36e {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 860.317477] env[68217]: DEBUG oslo_vmware.api [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961360, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.416215] env[68217]: DEBUG oslo_vmware.api [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961361, 'name': Destroy_Task} progress is 33%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.503978] env[68217]: DEBUG nova.compute.manager [req-cd9637e7-3236-4c1b-8e53-72e29904145b req-f228c59f-8a01-47ad-ad66-ce0340227502 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Received event network-changed-5186da63-8f63-49b5-8750-b04b51317122 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 860.504549] env[68217]: DEBUG nova.compute.manager [req-cd9637e7-3236-4c1b-8e53-72e29904145b req-f228c59f-8a01-47ad-ad66-ce0340227502 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Refreshing instance network info cache due to event network-changed-5186da63-8f63-49b5-8750-b04b51317122. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 860.504742] env[68217]: DEBUG oslo_concurrency.lockutils [req-cd9637e7-3236-4c1b-8e53-72e29904145b req-f228c59f-8a01-47ad-ad66-ce0340227502 service nova] Acquiring lock "refresh_cache-62628aed-e2f9-478f-bed7-00757fc3c484" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.504967] env[68217]: DEBUG oslo_concurrency.lockutils [req-cd9637e7-3236-4c1b-8e53-72e29904145b req-f228c59f-8a01-47ad-ad66-ce0340227502 service nova] Acquired lock "refresh_cache-62628aed-e2f9-478f-bed7-00757fc3c484" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 860.505184] env[68217]: DEBUG nova.network.neutron [req-cd9637e7-3236-4c1b-8e53-72e29904145b req-f228c59f-8a01-47ad-ad66-ce0340227502 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Refreshing network info cache for port 5186da63-8f63-49b5-8750-b04b51317122 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 860.520615] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961362, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.613750] env[68217]: DEBUG oslo_vmware.api [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961358, 'name': PowerOnVM_Task, 'duration_secs': 1.00033} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.614318] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 860.615841] env[68217]: INFO nova.compute.manager [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Took 11.53 seconds to spawn the instance on the hypervisor. [ 860.616143] env[68217]: DEBUG nova.compute.manager [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 860.618130] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af950007-7d43-47dc-b368-19fe7fd9a74d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.635433] env[68217]: DEBUG nova.compute.manager [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 860.712186] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961363, 'name': ReconfigVM_Task, 'duration_secs': 0.44157} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.712568] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Reconfigured VM instance instance-00000047 to attach disk [datastore1] 3af571ce-c400-45a1-97ad-4fbd53395129/3af571ce-c400-45a1-97ad-4fbd53395129.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 860.713293] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-25bc1c27-79d7-4186-a957-75efe6123a05 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.720123] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 860.720123] env[68217]: value = "task-2961364" [ 860.720123] env[68217]: _type = "Task" [ 860.720123] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.729848] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961364, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.788411] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffd6a7d9-122c-4b51-8b05-9c68fa1bc85c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "58c15727-79ae-404f-a054-d71e3be498cc" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 31.938s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 860.789549] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "58c15727-79ae-404f-a054-d71e3be498cc" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 4.193s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 860.790131] env[68217]: INFO nova.compute.manager [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Unshelving [ 860.824551] env[68217]: DEBUG oslo_vmware.api [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961360, 'name': ReconfigVM_Task, 'duration_secs': 0.65606} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.828541] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Reconfigured VM instance instance-00000041 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 860.829031] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 860.831101] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3e94526f-12f1-4336-85c2-df671507006b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.839434] env[68217]: DEBUG oslo_vmware.api [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 860.839434] env[68217]: value = "task-2961365" [ 860.839434] env[68217]: _type = "Task" [ 860.839434] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.849294] env[68217]: DEBUG oslo_vmware.api [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961365, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.924914] env[68217]: DEBUG oslo_vmware.api [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961361, 'name': Destroy_Task, 'duration_secs': 0.742729} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.925424] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Destroyed the VM [ 860.925779] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 860.926186] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-54cbd3ca-27f7-47be-9394-d4dd8c39609e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.934490] env[68217]: DEBUG oslo_vmware.api [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 860.934490] env[68217]: value = "task-2961366" [ 860.934490] env[68217]: _type = "Task" [ 860.934490] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.947599] env[68217]: DEBUG oslo_vmware.api [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961366, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.025745] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961362, 'name': CreateVM_Task, 'duration_secs': 0.566511} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.026239] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 861.026747] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.027186] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 861.027571] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 861.029135] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe6984e6-9d17-471c-9d9b-fa8cc61f4101 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.033784] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 861.033784] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b0ee9d-6645-d702-1667-4e5cbbd269ca" [ 861.033784] env[68217]: _type = "Task" [ 861.033784] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.045865] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b0ee9d-6645-d702-1667-4e5cbbd269ca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.143826] env[68217]: INFO nova.compute.manager [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Took 18.09 seconds to build instance. 
[ 861.165588] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.236224] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961364, 'name': Rename_Task, 'duration_secs': 0.198907} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.237261] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 861.237581] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-79eceefa-40a6-4320-99b9-84601f1a629d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.246289] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 861.246289] env[68217]: value = "task-2961367" [ 861.246289] env[68217]: _type = "Task" [ 861.246289] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.258381] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961367, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.336512] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0a333d6-9e4f-4c08-b821-ecb8ff1950cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.347419] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69f629ae-6913-4ab5-8c29-c434cd6ee0b3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.355131] env[68217]: DEBUG oslo_vmware.api [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961365, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.391198] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcc54fd5-03a9-4d52-8702-9c4b192a32e3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.400879] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6dc0dda-d7af-4ab7-ab10-d6c7eaa5ef14 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.417382] env[68217]: DEBUG nova.compute.provider_tree [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 861.447207] env[68217]: DEBUG oslo_vmware.api [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961366, 'name': RemoveSnapshot_Task} progress is 26%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.546880] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b0ee9d-6645-d702-1667-4e5cbbd269ca, 'name': SearchDatastore_Task, 'duration_secs': 0.013891} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.547289] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 861.547591] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 861.547842] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.548011] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 861.548344] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 861.548649] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b89c69b5-cee4-4b25-ae6a-a6d64a0ea693 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.554258] env[68217]: DEBUG nova.network.neutron [req-cd9637e7-3236-4c1b-8e53-72e29904145b req-f228c59f-8a01-47ad-ad66-ce0340227502 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Updated VIF entry in instance network info cache for port 5186da63-8f63-49b5-8750-b04b51317122. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 861.554614] env[68217]: DEBUG nova.network.neutron [req-cd9637e7-3236-4c1b-8e53-72e29904145b req-f228c59f-8a01-47ad-ad66-ce0340227502 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Updating instance_info_cache with network_info: [{"id": "5186da63-8f63-49b5-8750-b04b51317122", "address": "fa:16:3e:05:bd:f2", "network": {"id": "2046820f-fbc9-4e17-97f7-bb97bc53083f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-34123343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.134", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b359376e18b4c878d281b8fbec28f69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5186da63-8f", "ovs_interfaceid": "5186da63-8f63-49b5-8750-b04b51317122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.559656] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 861.559859] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 861.560619] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0caaef7b-8d7f-4a85-a3a0-60df310de829 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.566511] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 861.566511] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c2a38e-a208-b168-7e35-d4e8ebd35784" [ 861.566511] env[68217]: _type = "Task" [ 861.566511] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.576133] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c2a38e-a208-b168-7e35-d4e8ebd35784, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.649358] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2e66e38f-3b0c-4cae-aa50-39aa86088f4d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "09a8469d-567c-4247-96eb-edf0f4040f65" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.611s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 861.759101] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961367, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.822099] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 861.852212] env[68217]: DEBUG oslo_vmware.api [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961365, 'name': PowerOnVM_Task, 'duration_secs': 0.549874} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.852577] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 861.852842] env[68217]: DEBUG nova.compute.manager [None req-a9efd932-d2df-458c-92e3-c8b36fefd609 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 861.853697] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6c3dd63-7dd5-4360-8c42-b3d96311094f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.919640] env[68217]: DEBUG nova.scheduler.client.report [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 861.949717] env[68217]: DEBUG oslo_vmware.api [None 
req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961366, 'name': RemoveSnapshot_Task, 'duration_secs': 0.865281} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.949717] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 861.950138] env[68217]: INFO nova.compute.manager [None req-603fcd82-22d2-48a0-9d59-18266706fcd0 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Took 16.19 seconds to snapshot the instance on the hypervisor. [ 862.058057] env[68217]: DEBUG oslo_concurrency.lockutils [req-cd9637e7-3236-4c1b-8e53-72e29904145b req-f228c59f-8a01-47ad-ad66-ce0340227502 service nova] Releasing lock "refresh_cache-62628aed-e2f9-478f-bed7-00757fc3c484" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 862.079013] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c2a38e-a208-b168-7e35-d4e8ebd35784, 'name': SearchDatastore_Task, 'duration_secs': 0.013042} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.079887] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5906026c-44bb-4af7-98cb-b4b18db6f530 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.089777] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 862.089777] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]526e9d9a-c7b4-8051-2f50-06c7e3cbd9b0" [ 862.089777] env[68217]: _type = "Task" [ 862.089777] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.101825] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526e9d9a-c7b4-8051-2f50-06c7e3cbd9b0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.258261] env[68217]: DEBUG oslo_vmware.api [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961367, 'name': PowerOnVM_Task, 'duration_secs': 0.650016} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.258261] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 862.258447] env[68217]: INFO nova.compute.manager [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Took 10.61 seconds to spawn the instance on the hypervisor. [ 862.258568] env[68217]: DEBUG nova.compute.manager [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 862.259435] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a6e3852-7e2b-4e8d-a7a3-82ce25cf7572 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.431026] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.150s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 862.431026] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.571s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.453021] env[68217]: INFO nova.scheduler.client.report [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Deleted allocations for instance 2e3dae16-dba3-4230-913d-7a5c3469e36e [ 862.601841] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526e9d9a-c7b4-8051-2f50-06c7e3cbd9b0, 'name': SearchDatastore_Task, 'duration_secs': 0.011152} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.601841] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 862.601841] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 03d61c68-1b37-4172-b276-67a73a0dc228/03d61c68-1b37-4172-b276-67a73a0dc228.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 862.601841] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fd44cbec-2c71-464e-a4a9-8b09bd41e5a7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.609659] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 862.609659] env[68217]: value = "task-2961368" [ 862.609659] env[68217]: _type = "Task" [ 862.609659] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.618420] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961368, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.782664] env[68217]: INFO nova.compute.manager [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Took 17.95 seconds to build instance. 
[ 862.896679] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "ca9ef7ff-b942-4363-a4f8-9163791ec162" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 862.896921] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "ca9ef7ff-b942-4363-a4f8-9163791ec162" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 862.939681] env[68217]: INFO nova.compute.claims [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 862.961464] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0f418e54-0fa1-4df4-9962-629569718598 tempest-ServerRescueTestJSONUnderV235-212825120 tempest-ServerRescueTestJSONUnderV235-212825120-project-member] Lock "2e3dae16-dba3-4230-913d-7a5c3469e36e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.778s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.124012] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961368, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.286123] env[68217]: DEBUG oslo_concurrency.lockutils [None req-48cbc623-2402-4f70-b662-4e3418bea99f tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "3af571ce-c400-45a1-97ad-4fbd53395129" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.463s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 863.401162] env[68217]: DEBUG nova.compute.manager [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 863.446816] env[68217]: INFO nova.compute.resource_tracker [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Updating resource usage from migration 09e5617b-3f61-4244-8c01-a6a0f8233b59 [ 863.626343] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961368, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.553688} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 863.630063] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 03d61c68-1b37-4172-b276-67a73a0dc228/03d61c68-1b37-4172-b276-67a73a0dc228.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 863.630432] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 863.631101] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-90515c0a-68e9-495d-a5af-a28b794e3dda {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.639581] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 863.639581] env[68217]: value = "task-2961369" [ 863.639581] env[68217]: _type = "Task" [ 863.639581] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.650730] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961369, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.933028] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 863.961696] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b333ceb9-4917-4a16-a713-e86d98c7684d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.969603] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d72345c-9216-4ba6-acca-d26d012e6da3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.015995] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a955bfcb-4703-4978-bca0-b2a53cf8c7f2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.023675] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7144c43f-ec3b-4968-be65-f56db723ba22 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.032862] env[68217]: DEBUG nova.compute.manager [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 864.032862] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-615685ea-7022-4d09-9455-d04d943ce8b2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.051021] env[68217]: DEBUG nova.compute.provider_tree [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.061968] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3a467ba3-3c0b-4974-a2f2-5187438ae947 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "f369b627-9dc3-44bf-8bbe-649aace7e66c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 864.062256] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3a467ba3-3c0b-4974-a2f2-5187438ae947 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "f369b627-9dc3-44bf-8bbe-649aace7e66c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 864.153025] env[68217]: DEBUG 
oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961369, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081516} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.153025] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 864.153025] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85b61997-ac20-414e-8e19-8015c3cc4d6d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.179340] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Reconfiguring VM instance instance-00000048 to attach disk [datastore1] 03d61c68-1b37-4172-b276-67a73a0dc228/03d61c68-1b37-4172-b276-67a73a0dc228.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 864.180137] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b233f8e-6b3f-4590-91b0-d48e1aa3a166 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.204290] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 864.204290] env[68217]: value = "task-2961370" [ 864.204290] env[68217]: _type = "Task" [ 864.204290] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.215322] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961370, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.551336] env[68217]: DEBUG nova.compute.manager [req-65db8851-e587-477d-a9e9-7da31faf9934 req-7eec5b05-0138-4ddb-960f-81f96c2d4201 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Received event network-changed-5186da63-8f63-49b5-8750-b04b51317122 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 864.551536] env[68217]: DEBUG nova.compute.manager [req-65db8851-e587-477d-a9e9-7da31faf9934 req-7eec5b05-0138-4ddb-960f-81f96c2d4201 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Refreshing instance network info cache due to event network-changed-5186da63-8f63-49b5-8750-b04b51317122. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 864.551713] env[68217]: DEBUG oslo_concurrency.lockutils [req-65db8851-e587-477d-a9e9-7da31faf9934 req-7eec5b05-0138-4ddb-960f-81f96c2d4201 service nova] Acquiring lock "refresh_cache-62628aed-e2f9-478f-bed7-00757fc3c484" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.551854] env[68217]: DEBUG oslo_concurrency.lockutils [req-65db8851-e587-477d-a9e9-7da31faf9934 req-7eec5b05-0138-4ddb-960f-81f96c2d4201 service nova] Acquired lock "refresh_cache-62628aed-e2f9-478f-bed7-00757fc3c484" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 864.552363] env[68217]: DEBUG nova.network.neutron [req-65db8851-e587-477d-a9e9-7da31faf9934 req-7eec5b05-0138-4ddb-960f-81f96c2d4201 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Refreshing network info cache for port 5186da63-8f63-49b5-8750-b04b51317122 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 864.563660] env[68217]: DEBUG nova.scheduler.client.report [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 864.571297] env[68217]: INFO nova.compute.manager [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] instance snapshotting [ 864.572476] env[68217]: DEBUG nova.compute.manager [None req-3a467ba3-3c0b-4974-a2f2-5187438ae947 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: f369b627-9dc3-44bf-8bbe-649aace7e66c] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 864.578650] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4d204d0-6409-4fef-80bd-b047f0f1b151 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.609325] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d20920-fe8f-4319-9530-46f1d6655bbf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.716272] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961370, 'name': ReconfigVM_Task, 'duration_secs': 0.510956} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.716652] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Reconfigured VM instance instance-00000048 to attach disk [datastore1] 03d61c68-1b37-4172-b276-67a73a0dc228/03d61c68-1b37-4172-b276-67a73a0dc228.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 864.717406] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c0337c3c-d380-49d2-8007-403b0f72729e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.726087] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 864.726087] env[68217]: value = "task-2961371" [ 864.726087] env[68217]: _type = "Task" [ 864.726087] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.735069] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961371, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.007032] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "09290e60-7751-408e-9d6d-20e7cb61767b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.007032] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "09290e60-7751-408e-9d6d-20e7cb61767b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.007438] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "09290e60-7751-408e-9d6d-20e7cb61767b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.007438] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "09290e60-7751-408e-9d6d-20e7cb61767b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.008054] env[68217]: 
DEBUG oslo_concurrency.lockutils [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "09290e60-7751-408e-9d6d-20e7cb61767b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.011235] env[68217]: INFO nova.compute.manager [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Terminating instance [ 865.078207] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.647s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.078436] env[68217]: INFO nova.compute.manager [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Migrating [ 865.089843] env[68217]: DEBUG nova.compute.manager [None req-3a467ba3-3c0b-4974-a2f2-5187438ae947 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: f369b627-9dc3-44bf-8bbe-649aace7e66c] Instance disappeared before build. {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2446}} [ 865.095824] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.843s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.097873] env[68217]: INFO nova.compute.claims [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 865.126857] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 865.127208] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f33bd12c-ab62-4236-aeb4-403633bcba33 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.140779] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 865.140779] env[68217]: value = "task-2961372" [ 865.140779] env[68217]: _type = "Task" [ 865.140779] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.158763] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961372, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.237832] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961371, 'name': Rename_Task, 'duration_secs': 0.210836} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.238355] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 865.239309] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81f41eb2-c742-474b-b2d0-97165a05ef43 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.247101] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 865.247101] env[68217]: value = "task-2961373" [ 865.247101] env[68217]: _type = "Task" [ 865.247101] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.256989] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961373, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.463718] env[68217]: DEBUG nova.network.neutron [req-65db8851-e587-477d-a9e9-7da31faf9934 req-7eec5b05-0138-4ddb-960f-81f96c2d4201 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Updated VIF entry in instance network info cache for port 5186da63-8f63-49b5-8750-b04b51317122. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 865.464130] env[68217]: DEBUG nova.network.neutron [req-65db8851-e587-477d-a9e9-7da31faf9934 req-7eec5b05-0138-4ddb-960f-81f96c2d4201 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Updating instance_info_cache with network_info: [{"id": "5186da63-8f63-49b5-8750-b04b51317122", "address": "fa:16:3e:05:bd:f2", "network": {"id": "2046820f-fbc9-4e17-97f7-bb97bc53083f", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-34123343-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8b359376e18b4c878d281b8fbec28f69", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3d2e4070-a78e-4d08-a104-b6312ab65577", "external-id": "nsx-vlan-transportzone-292", "segmentation_id": 292, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5186da63-8f", "ovs_interfaceid": "5186da63-8f63-49b5-8750-b04b51317122", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.518259] env[68217]: DEBUG nova.compute.manager [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 865.518523] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 865.519899] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3450424c-392e-4152-99fe-06da17874a3a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.527775] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 865.528046] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-27ae59b5-d20b-454e-bbc6-bb8b58141cce {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.534677] env[68217]: DEBUG oslo_vmware.api [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 865.534677] env[68217]: value = "task-2961374" [ 865.534677] env[68217]: _type = "Task" [ 865.534677] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.543839] env[68217]: DEBUG oslo_vmware.api [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961374, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.606191] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3a467ba3-3c0b-4974-a2f2-5187438ae947 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "f369b627-9dc3-44bf-8bbe-649aace7e66c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 1.544s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 865.608030] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "refresh_cache-2612f6fc-a43f-4011-8a09-51088a49371a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.608211] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "refresh_cache-2612f6fc-a43f-4011-8a09-51088a49371a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 865.608389] env[68217]: DEBUG nova.network.neutron [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 865.650482] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961372, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.758856] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961373, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.859071] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "a7625a02-993b-4577-8d42-f763858a6154" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 865.859341] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "a7625a02-993b-4577-8d42-f763858a6154" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 865.967488] env[68217]: DEBUG oslo_concurrency.lockutils [req-65db8851-e587-477d-a9e9-7da31faf9934 req-7eec5b05-0138-4ddb-960f-81f96c2d4201 service nova] Releasing lock "refresh_cache-62628aed-e2f9-478f-bed7-00757fc3c484" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.046303] env[68217]: DEBUG oslo_vmware.api [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961374, 'name': PowerOffVM_Task, 'duration_secs': 0.317098} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.046762] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 866.047010] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 866.047304] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5faf5add-daed-487a-bbf3-dbd8daaa91a4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.123592] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 866.123802] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 866.123991] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Deleting the datastore file [datastore2] 09290e60-7751-408e-9d6d-20e7cb61767b {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 866.124300] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ebc7285c-41d3-4f9a-b453-4ec01b1ff887 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.130450] env[68217]: DEBUG oslo_vmware.api [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 866.130450] env[68217]: value = "task-2961376" [ 866.130450] env[68217]: _type = "Task" [ 866.130450] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.137205] env[68217]: DEBUG oslo_concurrency.lockutils [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Acquiring lock "62628aed-e2f9-478f-bed7-00757fc3c484" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.137476] env[68217]: DEBUG oslo_concurrency.lockutils [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Lock "62628aed-e2f9-478f-bed7-00757fc3c484" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.137724] env[68217]: DEBUG oslo_concurrency.lockutils [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Acquiring lock "62628aed-e2f9-478f-bed7-00757fc3c484-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 866.137922] env[68217]: DEBUG oslo_concurrency.lockutils [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Lock "62628aed-e2f9-478f-bed7-00757fc3c484-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 866.138118] env[68217]: DEBUG oslo_concurrency.lockutils [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Lock "62628aed-e2f9-478f-bed7-00757fc3c484-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 866.142857] env[68217]: DEBUG oslo_vmware.api [None 
req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961376, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.143390] env[68217]: INFO nova.compute.manager [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Terminating instance [ 866.156032] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961372, 'name': CreateSnapshot_Task, 'duration_secs': 0.914171} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.158787] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 866.160391] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad95cc26-fc11-4e15-92ff-acdd701b3c80 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.263902] env[68217]: DEBUG oslo_vmware.api [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961373, 'name': PowerOnVM_Task, 'duration_secs': 0.635381} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.264201] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 866.264407] env[68217]: INFO nova.compute.manager [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Took 11.61 seconds to spawn the instance on the hypervisor. 
[ 866.264586] env[68217]: DEBUG nova.compute.manager [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 866.265401] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2514e786-ca84-49f2-90c7-6814456bad46 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.332190] env[68217]: DEBUG nova.network.neutron [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Updating instance_info_cache with network_info: [{"id": "49bc7718-8633-456d-b4d1-6bcc8493670b", "address": "fa:16:3e:8b:bf:69", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49bc7718-86", "ovs_interfaceid": "49bc7718-8633-456d-b4d1-6bcc8493670b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.362527] env[68217]: DEBUG nova.compute.manager [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 866.547797] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42af632d-af34-4ecc-81bf-4eeb4a60f82b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.556290] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0f0de99-7528-4b3b-9acd-e7d5d061c022 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.588971] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c45be86-687f-4c98-86a6-28dc293a9c06 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.600260] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286dba46-4cfc-40b2-a2b4-54fb980b14d0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.615695] env[68217]: DEBUG nova.compute.provider_tree [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.642654] env[68217]: DEBUG oslo_vmware.api [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961376, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.345617} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.642815] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 866.643026] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 866.643395] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 866.643655] env[68217]: INFO nova.compute.manager [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 866.643963] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 866.644289] env[68217]: DEBUG nova.compute.manager [-] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 866.645175] env[68217]: DEBUG nova.network.neutron [-] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 866.651666] env[68217]: DEBUG nova.compute.manager [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 866.651920] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 866.652879] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fec576d-2631-47bf-adca-043db8b5c91c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.661014] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 866.661339] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-88d2cb46-177c-4272-939a-f235b7698fc2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.667563] env[68217]: DEBUG oslo_vmware.api [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Waiting for the task: (returnval){ [ 866.667563] env[68217]: value = "task-2961377" [ 866.667563] env[68217]: _type = "Task" [ 866.667563] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.683840] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 866.683840] env[68217]: DEBUG oslo_vmware.api [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': task-2961377, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.683840] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-d8a1f913-c178-4111-9f5f-2b095ab01a60 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.691044] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 866.691044] env[68217]: value = "task-2961378" [ 866.691044] env[68217]: _type = "Task" [ 866.691044] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.707786] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961378, 'name': CloneVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.787079] env[68217]: INFO nova.compute.manager [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Took 20.57 seconds to build instance. 
[ 866.837776] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "refresh_cache-2612f6fc-a43f-4011-8a09-51088a49371a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 866.899611] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.119441] env[68217]: DEBUG nova.scheduler.client.report [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 867.135387] env[68217]: DEBUG nova.compute.manager [req-071f58f3-2536-45df-bf81-12acc7b231d8 req-8abf6519-e5ab-490e-a548-f01ff7cc7a80 service nova] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Received event network-vif-deleted-1eb632e3-fe01-4d72-a4ea-834af75497ef {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 867.135930] env[68217]: INFO nova.compute.manager [req-071f58f3-2536-45df-bf81-12acc7b231d8 req-8abf6519-e5ab-490e-a548-f01ff7cc7a80 service nova] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Neutron deleted interface 1eb632e3-fe01-4d72-a4ea-834af75497ef; detaching it from the instance and deleting it from the info cache [ 867.135930] env[68217]: DEBUG nova.network.neutron [req-071f58f3-2536-45df-bf81-12acc7b231d8 req-8abf6519-e5ab-490e-a548-f01ff7cc7a80 service nova] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.178350] env[68217]: DEBUG oslo_vmware.api [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': task-2961377, 'name': PowerOffVM_Task, 'duration_secs': 0.333333} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.179043] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 867.179253] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 867.179521] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2632d1c4-32ae-4f06-bbea-f4997bf78e39 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.204161] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961378, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.255558] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 867.255558] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 867.255793] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Deleting the datastore file [datastore2] 62628aed-e2f9-478f-bed7-00757fc3c484 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 867.256096] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d708f5ea-216a-448a-89f7-d9dbab4c726d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.263021] env[68217]: DEBUG oslo_vmware.api [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Waiting for the task: (returnval){ [ 867.263021] env[68217]: value = "task-2961380" [ 867.263021] env[68217]: _type = "Task" [ 867.263021] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.272119] env[68217]: DEBUG oslo_vmware.api [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': task-2961380, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.290322] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c1298fb-81c1-4e09-834a-0a40eada5925 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "03d61c68-1b37-4172-b276-67a73a0dc228" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 22.090s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.519217] env[68217]: DEBUG nova.network.neutron [-] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.626261] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.530s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 867.627165] env[68217]: DEBUG nova.compute.manager [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 867.629541] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.464s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.632706] env[68217]: INFO nova.compute.claims [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 867.638938] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f525952a-f842-4016-9880-80f9956a27ed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.649301] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd5e67b4-8a9b-4343-a9d1-18a3977a78de {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.689590] env[68217]: DEBUG nova.compute.manager [req-071f58f3-2536-45df-bf81-12acc7b231d8 req-8abf6519-e5ab-490e-a548-f01ff7cc7a80 service nova] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Detach interface failed, port_id=1eb632e3-fe01-4d72-a4ea-834af75497ef, reason: Instance 09290e60-7751-408e-9d6d-20e7cb61767b could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 867.704154] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961378, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.773865] env[68217]: DEBUG oslo_vmware.api [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Task: {'id': task-2961380, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.22934} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.774190] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 867.774443] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 867.774757] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 867.775044] env[68217]: INFO nova.compute.manager [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Took 1.12 seconds to destroy the instance on the hypervisor. [ 867.775324] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 867.775581] env[68217]: DEBUG nova.compute.manager [-] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 867.775648] env[68217]: DEBUG nova.network.neutron [-] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 867.872950] env[68217]: DEBUG oslo_concurrency.lockutils [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "03d61c68-1b37-4172-b276-67a73a0dc228" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 867.873397] env[68217]: DEBUG oslo_concurrency.lockutils [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "03d61c68-1b37-4172-b276-67a73a0dc228" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 867.873612] env[68217]: INFO nova.compute.manager [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Shelving [ 868.022785] env[68217]: INFO nova.compute.manager [-] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Took 1.38 seconds to deallocate network for instance. [ 868.056033] env[68217]: DEBUG oslo_vmware.rw_handles [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52593c63-4ccf-b90f-c363-528d04c5f1c6/disk-0.vmdk. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 868.056963] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9312b8-1c02-49b7-90f4-18c9a36c77de {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.064235] env[68217]: DEBUG oslo_vmware.rw_handles [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52593c63-4ccf-b90f-c363-528d04c5f1c6/disk-0.vmdk is in state: ready. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 868.064444] env[68217]: ERROR oslo_vmware.rw_handles [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52593c63-4ccf-b90f-c363-528d04c5f1c6/disk-0.vmdk due to incomplete transfer. 
[ 868.064685] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-f7ab9fde-6b46-44b6-b145-f0b8dbfc777f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.072329] env[68217]: DEBUG oslo_vmware.rw_handles [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52593c63-4ccf-b90f-c363-528d04c5f1c6/disk-0.vmdk. {{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 868.072531] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Uploaded image 9f4edacb-625d-403d-beb9-916f1ffd1cd7 to the Glance image server {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 868.074820] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 868.075357] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-60a8d867-36e4-47ae-bfa8-16ae5d4e5865 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.080728] env[68217]: DEBUG oslo_vmware.api [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 868.080728] env[68217]: value = "task-2961381" [ 868.080728] env[68217]: _type = "Task" [ 868.080728] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.088637] env[68217]: DEBUG oslo_vmware.api [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961381, 'name': Destroy_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.135223] env[68217]: DEBUG nova.compute.utils [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 868.138707] env[68217]: DEBUG nova.compute.manager [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 868.138707] env[68217]: DEBUG nova.network.neutron [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 868.175622] env[68217]: DEBUG nova.policy [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8fcfd9e5288b4ee2b012a0a2cf242d7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3bf05c5ad8574e0f858cd2261af9ef24', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 868.205255] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961378, 'name': CloneVM_Task} progress is 95%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.353589] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21448beb-2236-41fb-a76a-279ac93d6e43 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.373488] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Updating instance '2612f6fc-a43f-4011-8a09-51088a49371a' progress to 0 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 868.447076] env[68217]: DEBUG nova.network.neutron [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Successfully created port: 6d2d73f5-5a44-440a-b5f5-6b98ba92a165 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 868.531155] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 868.551907] env[68217]: DEBUG nova.network.neutron [-] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.595282] env[68217]: DEBUG oslo_vmware.api [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961381, 
'name': Destroy_Task, 'duration_secs': 0.346586} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.595582] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Destroyed the VM [ 868.595832] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 868.596102] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-3df29cf8-d053-437a-8462-9fe7b897d824 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.606393] env[68217]: DEBUG oslo_vmware.api [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 868.606393] env[68217]: value = "task-2961382" [ 868.606393] env[68217]: _type = "Task" [ 868.606393] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.616325] env[68217]: DEBUG oslo_vmware.api [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961382, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.644619] env[68217]: DEBUG nova.compute.manager [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 868.709726] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961378, 'name': CloneVM_Task, 'duration_secs': 1.665348} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.710040] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Created linked-clone VM from snapshot [ 868.710824] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe0d8fe-21f9-4b4c-a191-55e8c0a2365b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.722156] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Uploading image a1eef9f2-a38c-4f50-9404-678c6d3c131a {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 868.760416] env[68217]: DEBUG oslo_vmware.rw_handles [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 868.760416] env[68217]: value = "vm-594302" [ 868.760416] env[68217]: _type = "VirtualMachine" [ 868.760416] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 868.760629] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-06f5246b-da1d-4a50-ac54-40193d88c659 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.772081] env[68217]: DEBUG oslo_vmware.rw_handles [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lease: (returnval){ [ 868.772081] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5253fbdf-0de3-6a59-6018-b69ff229520d" [ 868.772081] env[68217]: _type = "HttpNfcLease" [ 868.772081] env[68217]: } obtained for exporting VM: (result){ [ 868.772081] env[68217]: value = "vm-594302" [ 868.772081] env[68217]: _type = "VirtualMachine" [ 868.772081] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 868.772438] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the lease: (returnval){ [ 868.772438] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5253fbdf-0de3-6a59-6018-b69ff229520d" [ 868.772438] env[68217]: _type = "HttpNfcLease" [ 868.772438] env[68217]: } to be ready. {{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 868.783057] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 868.783057] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5253fbdf-0de3-6a59-6018-b69ff229520d" [ 868.783057] env[68217]: _type = "HttpNfcLease" [ 868.783057] env[68217]: } is initializing. 
{{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 868.882402] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 868.882402] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-062631a5-12fb-4db2-b33f-0b8d31e2357b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.884208] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 868.884453] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f5e8e658-1603-4616-be13-d9bd8fb59f08 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.893666] env[68217]: DEBUG oslo_vmware.api [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 868.893666] env[68217]: value = "task-2961385" [ 868.893666] env[68217]: _type = "Task" [ 868.893666] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.893829] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 868.893829] env[68217]: value = "task-2961384" [ 868.893829] env[68217]: _type = "Task" [ 868.893829] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.908304] env[68217]: DEBUG oslo_vmware.api [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961385, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.912179] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961384, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.054482] env[68217]: INFO nova.compute.manager [-] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Took 1.28 seconds to deallocate network for instance. [ 869.117801] env[68217]: DEBUG oslo_vmware.api [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961382, 'name': RemoveSnapshot_Task} progress is 97%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.137895] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c3dd18-536f-4c54-a1cd-bdf20fffca8b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.147027] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01d5c01b-99ad-4651-bc98-db4aa2ab38a9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.183277] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d1b0f1-bd2d-450f-a6b6-7548195df03f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.192596] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce88bb7-4f09-4ec8-a138-d61c092ff859 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.207531] env[68217]: DEBUG nova.compute.provider_tree [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 869.230698] env[68217]: DEBUG nova.compute.manager [req-65ae05b5-7210-4933-bdd0-26d5c137d770 req-f121a3eb-3cb6-44db-8075-b62b41bcfea3 service nova] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Received event network-vif-deleted-5186da63-8f63-49b5-8750-b04b51317122 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 869.280792] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 869.280792] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5253fbdf-0de3-6a59-6018-b69ff229520d" [ 869.280792] env[68217]: _type = "HttpNfcLease" [ 869.280792] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 869.281089] env[68217]: DEBUG oslo_vmware.rw_handles [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 869.281089] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5253fbdf-0de3-6a59-6018-b69ff229520d" [ 869.281089] env[68217]: _type = "HttpNfcLease" [ 869.281089] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 869.282087] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d1c249-30e9-4b39-8589-85949ad7d66a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.289177] env[68217]: DEBUG oslo_vmware.rw_handles [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e92d9e-e87e-1c0e-35fc-7a8b1e1ef5c8/disk-0.vmdk from lease info. 
{{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 869.289375] env[68217]: DEBUG oslo_vmware.rw_handles [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e92d9e-e87e-1c0e-35fc-7a8b1e1ef5c8/disk-0.vmdk for reading. {{(pid=68217) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 869.405891] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961384, 'name': PowerOffVM_Task, 'duration_secs': 0.20289} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.408742] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 869.409058] env[68217]: DEBUG oslo_vmware.api [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961385, 'name': PowerOffVM_Task, 'duration_secs': 0.26297} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.409725] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79db2cba-18ca-4e89-90b7-80a836b6be98 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.412860] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 869.413059] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Updating instance '2612f6fc-a43f-4011-8a09-51088a49371a' progress to 17 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 869.434862] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-497e8343-4d42-47f1-a687-788f890e5622 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.513669] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-cea99d3e-6f19-4cf2-8fa2-537e9d452c18 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.562123] env[68217]: DEBUG oslo_concurrency.lockutils [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] 
Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.620639] env[68217]: DEBUG oslo_vmware.api [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961382, 'name': RemoveSnapshot_Task, 'duration_secs': 0.649512} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.620936] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 869.621221] env[68217]: INFO nova.compute.manager [None req-6ece9ce7-6a49-432b-9dde-f96b4152b4be tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Took 14.38 seconds to snapshot the instance on the hypervisor. [ 869.656453] env[68217]: DEBUG nova.compute.manager [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 869.695025] env[68217]: DEBUG nova.virt.hardware [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 869.695025] env[68217]: DEBUG nova.virt.hardware [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 869.695025] env[68217]: DEBUG nova.virt.hardware [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 869.695247] env[68217]: DEBUG nova.virt.hardware [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 
tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 869.695247] env[68217]: DEBUG nova.virt.hardware [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 869.695247] env[68217]: DEBUG nova.virt.hardware [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 869.695247] env[68217]: DEBUG nova.virt.hardware [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 869.695247] env[68217]: DEBUG nova.virt.hardware [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 869.695404] env[68217]: DEBUG nova.virt.hardware [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 869.695404] env[68217]: DEBUG nova.virt.hardware [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 869.695404] env[68217]: DEBUG nova.virt.hardware [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 869.695404] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8cc234-ec75-4bb1-afa0-51719c8c867d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.703931] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a0a291e-b896-42f1-863a-3c69273063b0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.721020] env[68217]: DEBUG nova.scheduler.client.report [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Inventory has not changed for provider 
42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 869.921291] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 869.921669] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 869.921913] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 869.922240] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 869.922525] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 869.922766] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 869.923061] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 869.923377] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 
tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 869.923631] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 869.923908] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 869.924106] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 869.930354] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8463cd77-fe20-4455-bf62-c2888481cf84 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.943623] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 869.947712] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-b3e34011-6680-42ef-97a2-a9760894ea9e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.950592] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "650ebd16-da81-475e-a82a-7fa5fb2880bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 869.951118] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "650ebd16-da81-475e-a82a-7fa5fb2880bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 869.955642] env[68217]: DEBUG oslo_vmware.api [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 869.955642] env[68217]: value = "task-2961386" [ 869.955642] env[68217]: _type = "Task" [ 869.955642] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.960699] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 869.960699] env[68217]: value = "task-2961387" [ 869.960699] env[68217]: _type = "Task" [ 869.960699] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.973449] env[68217]: DEBUG oslo_vmware.api [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961386, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.981121] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961387, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.215754] env[68217]: DEBUG nova.network.neutron [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Successfully updated port: 6d2d73f5-5a44-440a-b5f5-6b98ba92a165 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 870.227554] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.598s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 870.228180] env[68217]: DEBUG nova.compute.manager [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 870.235307] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.413s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 870.236227] env[68217]: DEBUG nova.objects.instance [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lazy-loading 'pci_requests' on Instance uuid 58c15727-79ae-404f-a054-d71e3be498cc {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 870.456424] env[68217]: DEBUG nova.compute.manager [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 870.469613] env[68217]: DEBUG oslo_vmware.api [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961386, 'name': ReconfigVM_Task, 'duration_secs': 0.272307} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.470279] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Updating instance '2612f6fc-a43f-4011-8a09-51088a49371a' progress to 33 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 870.477553] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961387, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.718672] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "refresh_cache-a513976b-4859-4822-8989-c9452db62ee6" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.718872] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquired lock "refresh_cache-a513976b-4859-4822-8989-c9452db62ee6" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 870.718976] env[68217]: DEBUG nova.network.neutron [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 870.739579] env[68217]: DEBUG nova.compute.utils [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 870.742363] env[68217]: DEBUG nova.objects.instance [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lazy-loading 'numa_topology' on Instance uuid 58c15727-79ae-404f-a054-d71e3be498cc {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 870.744021] env[68217]: DEBUG nova.compute.manager [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 870.744383] env[68217]: DEBUG nova.network.neutron [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 870.819886] env[68217]: DEBUG nova.policy [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10c1ddac3d4946f88e9762a2bea8cfa9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '726c3dbb291b49b39db3ef87e35cdfbd', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 870.985018] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 870.985018] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 870.985018] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 870.985018] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 870.985418] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 870.985418] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 870.985418] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 870.985418] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 870.985418] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 870.985588] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 870.985588] env[68217]: DEBUG nova.virt.hardware [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 870.990458] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Reconfiguring VM instance instance-00000043 to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 870.991228] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961387, 'name': CreateSnapshot_Task, 'duration_secs': 0.783455} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.992544] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 870.993512] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-667bb994-8c6e-4fa3-bfe2-f75d0e0af132 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.009862] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 871.010889] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4be4a4d-4538-489e-b4ed-b9916d968c42 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.024275] env[68217]: DEBUG oslo_vmware.api [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 871.024275] env[68217]: value = "task-2961388" [ 871.024275] env[68217]: _type = "Task" [ 871.024275] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.035453] env[68217]: DEBUG oslo_vmware.api [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961388, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.248115] env[68217]: DEBUG nova.compute.manager [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 871.249252] env[68217]: INFO nova.compute.claims [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 871.296254] env[68217]: DEBUG nova.network.neutron [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 871.329723] env[68217]: DEBUG nova.network.neutron [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Successfully created port: 747300c0-a758-483f-ba39-99efe6e731ec {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 871.332731] env[68217]: DEBUG nova.compute.manager [req-7be7b806-4df8-48ca-b30b-a01411743bd7 req-97f09bac-a8e7-4789-840f-5cd1695c2730 service nova] [instance: a513976b-4859-4822-8989-c9452db62ee6] Received event network-vif-plugged-6d2d73f5-5a44-440a-b5f5-6b98ba92a165 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 871.333027] env[68217]: DEBUG oslo_concurrency.lockutils [req-7be7b806-4df8-48ca-b30b-a01411743bd7 req-97f09bac-a8e7-4789-840f-5cd1695c2730 service nova] Acquiring lock "a513976b-4859-4822-8989-c9452db62ee6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 871.333355] env[68217]: DEBUG oslo_concurrency.lockutils [req-7be7b806-4df8-48ca-b30b-a01411743bd7 req-97f09bac-a8e7-4789-840f-5cd1695c2730 service nova] Lock "a513976b-4859-4822-8989-c9452db62ee6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 871.333472] env[68217]: DEBUG oslo_concurrency.lockutils [req-7be7b806-4df8-48ca-b30b-a01411743bd7 req-97f09bac-a8e7-4789-840f-5cd1695c2730 service nova] Lock "a513976b-4859-4822-8989-c9452db62ee6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 871.333735] env[68217]: DEBUG nova.compute.manager [req-7be7b806-4df8-48ca-b30b-a01411743bd7 req-97f09bac-a8e7-4789-840f-5cd1695c2730 service nova] [instance: a513976b-4859-4822-8989-c9452db62ee6] No waiting events found dispatching network-vif-plugged-6d2d73f5-5a44-440a-b5f5-6b98ba92a165 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 871.333950] env[68217]: WARNING nova.compute.manager [req-7be7b806-4df8-48ca-b30b-a01411743bd7 req-97f09bac-a8e7-4789-840f-5cd1695c2730 service nova] [instance: a513976b-4859-4822-8989-c9452db62ee6] Received unexpected event network-vif-plugged-6d2d73f5-5a44-440a-b5f5-6b98ba92a165 for instance with vm_state building and task_state spawning. [ 871.334134] env[68217]: DEBUG nova.compute.manager [req-7be7b806-4df8-48ca-b30b-a01411743bd7 req-97f09bac-a8e7-4789-840f-5cd1695c2730 service nova] [instance: a513976b-4859-4822-8989-c9452db62ee6] Received event network-changed-6d2d73f5-5a44-440a-b5f5-6b98ba92a165 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 871.334288] env[68217]: DEBUG nova.compute.manager [req-7be7b806-4df8-48ca-b30b-a01411743bd7 req-97f09bac-a8e7-4789-840f-5cd1695c2730 service nova] [instance: a513976b-4859-4822-8989-c9452db62ee6] Refreshing instance network info cache due to event network-changed-6d2d73f5-5a44-440a-b5f5-6b98ba92a165. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 871.334476] env[68217]: DEBUG oslo_concurrency.lockutils [req-7be7b806-4df8-48ca-b30b-a01411743bd7 req-97f09bac-a8e7-4789-840f-5cd1695c2730 service nova] Acquiring lock "refresh_cache-a513976b-4859-4822-8989-c9452db62ee6" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.533205] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 871.533769] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-7f5d613a-40f6-4467-be1a-158a8881d37e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.549766] env[68217]: DEBUG oslo_vmware.api [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961388, 'name': ReconfigVM_Task, 'duration_secs': 0.27099} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.551880] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Reconfigured VM instance instance-00000043 to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 871.552039] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 871.552039] env[68217]: value = "task-2961389" [ 871.552039] env[68217]: _type = "Task" [ 871.552039] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.552759] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c978dfb8-452b-4c88-8bc0-80a692433303 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.564342] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961389, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.583263] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Reconfiguring VM instance instance-00000043 to attach disk [datastore2] 2612f6fc-a43f-4011-8a09-51088a49371a/2612f6fc-a43f-4011-8a09-51088a49371a.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 871.584444] env[68217]: DEBUG nova.network.neutron [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Updating instance_info_cache with network_info: [{"id": "6d2d73f5-5a44-440a-b5f5-6b98ba92a165", "address": "fa:16:3e:26:e4:d0", "network": {"id": "d699b565-498b-4788-9c08-0e23871a9180", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-728170078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bf05c5ad8574e0f858cd2261af9ef24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d669e36a-5c9d-4fa4-92c8-90e7cb814262", "external-id": "nsx-vlan-transportzone-589", "segmentation_id": 589, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d2d73f5-5a", "ovs_interfaceid": "6d2d73f5-5a44-440a-b5f5-6b98ba92a165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.585764] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db006ef6-7e27-497b-a3aa-14b40a20f02b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.606231] env[68217]: DEBUG oslo_vmware.api [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 871.606231] env[68217]: value = "task-2961390" [ 871.606231] env[68217]: _type = "Task" [ 871.606231] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.616884] env[68217]: DEBUG oslo_vmware.api [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961390, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.065156] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961389, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.099980] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Releasing lock "refresh_cache-a513976b-4859-4822-8989-c9452db62ee6" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.102009] env[68217]: DEBUG nova.compute.manager [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Instance network_info: |[{"id": "6d2d73f5-5a44-440a-b5f5-6b98ba92a165", "address": "fa:16:3e:26:e4:d0", "network": {"id": "d699b565-498b-4788-9c08-0e23871a9180", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-728170078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bf05c5ad8574e0f858cd2261af9ef24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d669e36a-5c9d-4fa4-92c8-90e7cb814262", "external-id": "nsx-vlan-transportzone-589", "segmentation_id": 589, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d2d73f5-5a", "ovs_interfaceid": "6d2d73f5-5a44-440a-b5f5-6b98ba92a165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 872.102009] env[68217]: DEBUG oslo_concurrency.lockutils [req-7be7b806-4df8-48ca-b30b-a01411743bd7 req-97f09bac-a8e7-4789-840f-5cd1695c2730 service nova] Acquired lock "refresh_cache-a513976b-4859-4822-8989-c9452db62ee6" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 872.102144] env[68217]: DEBUG nova.network.neutron [req-7be7b806-4df8-48ca-b30b-a01411743bd7 req-97f09bac-a8e7-4789-840f-5cd1695c2730 service nova] [instance: a513976b-4859-4822-8989-c9452db62ee6] Refreshing network info cache for port 6d2d73f5-5a44-440a-b5f5-6b98ba92a165 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 872.102273] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:e4:d0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 
'd669e36a-5c9d-4fa4-92c8-90e7cb814262', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6d2d73f5-5a44-440a-b5f5-6b98ba92a165', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 872.109872] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 872.110225] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a513976b-4859-4822-8989-c9452db62ee6] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 872.114046] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-61f66bb3-122c-410f-bc3e-be341b137009 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.134626] env[68217]: DEBUG oslo_vmware.api [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961390, 'name': ReconfigVM_Task, 'duration_secs': 0.364541} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.136192] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Reconfigured VM instance instance-00000043 to attach disk [datastore2] 2612f6fc-a43f-4011-8a09-51088a49371a/2612f6fc-a43f-4011-8a09-51088a49371a.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 872.136439] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Updating instance '2612f6fc-a43f-4011-8a09-51088a49371a' progress to 50 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 872.139625] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 872.139625] env[68217]: value = "task-2961391" [ 872.139625] env[68217]: _type = "Task" [ 872.139625] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.148675] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961391, 'name': CreateVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.262535] env[68217]: DEBUG nova.compute.manager [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 872.295504] env[68217]: DEBUG nova.virt.hardware [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:19:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='b6918665-ab7d-45a4-86f9-01de99934033',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-847543468',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 872.295848] env[68217]: DEBUG nova.virt.hardware [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 872.296057] env[68217]: DEBUG nova.virt.hardware [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 872.296484] env[68217]: DEBUG nova.virt.hardware [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 872.296615] env[68217]: DEBUG nova.virt.hardware [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 872.296781] env[68217]: DEBUG nova.virt.hardware [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 872.296990] env[68217]: DEBUG nova.virt.hardware [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 872.297230] env[68217]: DEBUG nova.virt.hardware [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 872.297386] env[68217]: DEBUG 
nova.virt.hardware [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 872.297547] env[68217]: DEBUG nova.virt.hardware [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 872.297716] env[68217]: DEBUG nova.virt.hardware [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 872.298842] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67879761-bff2-4f5f-b3ee-e5d131d8e737 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.306821] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af88dcd-7a70-47d2-addf-b6f7fdb61418 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.565088] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961389, 'name': CloneVM_Task} progress is 95%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.648575] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db53c6f3-c479-4503-9655-a947ba76da40 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.659887] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961391, 'name': CreateVM_Task, 'duration_secs': 0.516562} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.676641] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a513976b-4859-4822-8989-c9452db62ee6] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 872.680357] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.680576] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 872.680956] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 872.681877] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b15074f3-00a3-481a-a2ec-7e9a51e9b90c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.684972] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d7ef794-3c52-49f3-bb78-55a13399a5f5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.690686] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 872.690686] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f597fa-7f70-f320-a1ea-76324cf3f681" [ 872.690686] env[68217]: _type = "Task" [ 872.690686] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.706531] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Updating instance '2612f6fc-a43f-4011-8a09-51088a49371a' progress to 67 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 872.725290] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f597fa-7f70-f320-a1ea-76324cf3f681, 'name': SearchDatastore_Task, 'duration_secs': 0.014278} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.725877] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 872.726000] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 872.726273] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.726424] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 872.726607] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 872.727268] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8f7268d5-623e-46d0-9c82-ba2ebbe33230 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.738377] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 872.738513] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 872.739333] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-477f20d3-270d-4e7b-9b0b-584e4e875a48 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.744779] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 872.744779] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]521833c2-723a-5d91-13da-b7549d21bd63" [ 872.744779] env[68217]: _type = "Task" [ 872.744779] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.755885] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521833c2-723a-5d91-13da-b7549d21bd63, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.758991] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-301ab411-9949-4a16-b210-c4a29d08b1ea {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.766942] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d87dded-e559-46fc-8ada-ee0948d65024 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.800887] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d838dba-a550-4e7e-8b20-0ab705c3af42 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.809012] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96240e10-e0a3-4686-9e8c-9859d40c34b9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.827168] env[68217]: DEBUG nova.compute.provider_tree [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 873.065698] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961389, 'name': CloneVM_Task, 'duration_secs': 1.300362} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.066395] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Created linked-clone VM from snapshot [ 873.067183] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90d4ee30-2765-4145-a351-870b975f266c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.074499] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Uploading image 8f1b0ac2-efb1-4feb-8dce-4206b58682bf {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 873.103191] env[68217]: DEBUG oslo_vmware.rw_handles [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 873.103191] env[68217]: value = "vm-594304" [ 873.103191] env[68217]: _type = "VirtualMachine" [ 873.103191] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 873.103691] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-76c53b70-b2f4-4ca2-963a-492ea0bb5c8c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.110785] env[68217]: DEBUG oslo_vmware.rw_handles [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lease: (returnval){ [ 873.110785] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c493e7-dace-b2e4-9c62-ccacc05196c1" [ 873.110785] env[68217]: _type = "HttpNfcLease" [ 873.110785] env[68217]: } obtained for exporting VM: (result){ [ 873.110785] env[68217]: value = "vm-594304" [ 873.110785] env[68217]: _type = "VirtualMachine" [ 873.110785] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 873.111060] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the lease: (returnval){ [ 873.111060] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c493e7-dace-b2e4-9c62-ccacc05196c1" [ 873.111060] env[68217]: _type = "HttpNfcLease" [ 873.111060] env[68217]: } to be ready. {{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 873.117326] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 873.117326] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c493e7-dace-b2e4-9c62-ccacc05196c1" [ 873.117326] env[68217]: _type = "HttpNfcLease" [ 873.117326] env[68217]: } is initializing. 
{{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 873.253752] env[68217]: DEBUG nova.network.neutron [req-7be7b806-4df8-48ca-b30b-a01411743bd7 req-97f09bac-a8e7-4789-840f-5cd1695c2730 service nova] [instance: a513976b-4859-4822-8989-c9452db62ee6] Updated VIF entry in instance network info cache for port 6d2d73f5-5a44-440a-b5f5-6b98ba92a165. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 873.254080] env[68217]: DEBUG nova.network.neutron [req-7be7b806-4df8-48ca-b30b-a01411743bd7 req-97f09bac-a8e7-4789-840f-5cd1695c2730 service nova] [instance: a513976b-4859-4822-8989-c9452db62ee6] Updating instance_info_cache with network_info: [{"id": "6d2d73f5-5a44-440a-b5f5-6b98ba92a165", "address": "fa:16:3e:26:e4:d0", "network": {"id": "d699b565-498b-4788-9c08-0e23871a9180", "bridge": "br-int", "label": "tempest-ImagesOneServerNegativeTestJSON-728170078-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3bf05c5ad8574e0f858cd2261af9ef24", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d669e36a-5c9d-4fa4-92c8-90e7cb814262", "external-id": "nsx-vlan-transportzone-589", "segmentation_id": 589, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6d2d73f5-5a", "ovs_interfaceid": "6d2d73f5-5a44-440a-b5f5-6b98ba92a165", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.259886] env[68217]: DEBUG nova.network.neutron [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Port 49bc7718-8633-456d-b4d1-6bcc8493670b binding to destination host cpu-1 is already ACTIVE {{(pid=68217) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 873.263918] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521833c2-723a-5d91-13da-b7549d21bd63, 'name': SearchDatastore_Task, 'duration_secs': 0.010726} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.263918] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f0767e3-11bb-4e2a-8141-370e9ba94bf4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.269807] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 873.269807] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]525be734-fb8b-cbf3-2757-ba34e1fa735c" [ 873.269807] env[68217]: _type = "Task" [ 873.269807] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.279950] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]525be734-fb8b-cbf3-2757-ba34e1fa735c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.336191] env[68217]: DEBUG nova.scheduler.client.report [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 873.411561] env[68217]: DEBUG nova.compute.manager [req-38db7a49-bc97-4685-98c6-345ed024596c req-8d5ec561-c438-4b79-9d32-dcf16619f6a6 service nova] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Received event network-vif-plugged-747300c0-a758-483f-ba39-99efe6e731ec {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 873.411962] env[68217]: DEBUG oslo_concurrency.lockutils [req-38db7a49-bc97-4685-98c6-345ed024596c req-8d5ec561-c438-4b79-9d32-dcf16619f6a6 service nova] Acquiring lock "fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 873.412510] env[68217]: DEBUG oslo_concurrency.lockutils [req-38db7a49-bc97-4685-98c6-345ed024596c req-8d5ec561-c438-4b79-9d32-dcf16619f6a6 service nova] Lock "fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.412903] env[68217]: DEBUG oslo_concurrency.lockutils [req-38db7a49-bc97-4685-98c6-345ed024596c req-8d5ec561-c438-4b79-9d32-dcf16619f6a6 service nova] Lock "fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.413151] env[68217]: DEBUG nova.compute.manager [req-38db7a49-bc97-4685-98c6-345ed024596c req-8d5ec561-c438-4b79-9d32-dcf16619f6a6 service nova] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] No waiting events found dispatching network-vif-plugged-747300c0-a758-483f-ba39-99efe6e731ec {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 873.413323] env[68217]: WARNING nova.compute.manager [req-38db7a49-bc97-4685-98c6-345ed024596c req-8d5ec561-c438-4b79-9d32-dcf16619f6a6 service nova] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Received unexpected event network-vif-plugged-747300c0-a758-483f-ba39-99efe6e731ec for instance with vm_state building and task_state spawning. [ 873.550537] env[68217]: DEBUG nova.network.neutron [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Successfully updated port: 747300c0-a758-483f-ba39-99efe6e731ec {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 873.620626] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 873.620626] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c493e7-dace-b2e4-9c62-ccacc05196c1" [ 873.620626] env[68217]: _type = "HttpNfcLease" [ 873.620626] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 873.621607] env[68217]: DEBUG oslo_vmware.rw_handles [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 873.621607] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c493e7-dace-b2e4-9c62-ccacc05196c1" [ 873.621607] env[68217]: _type = "HttpNfcLease" [ 873.621607] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 873.622401] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e4675d-3301-4bea-971b-d265fed00d22 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.630793] env[68217]: DEBUG oslo_vmware.rw_handles [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52aa509f-9d9d-2195-0631-532e9b8d0b6e/disk-0.vmdk from lease info. {{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 873.630975] env[68217]: DEBUG oslo_vmware.rw_handles [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52aa509f-9d9d-2195-0631-532e9b8d0b6e/disk-0.vmdk for reading. 
{{(pid=68217) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 873.726673] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-865f33b6-0437-47e4-8261-dc6dd44c6038 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.766441] env[68217]: DEBUG oslo_concurrency.lockutils [req-7be7b806-4df8-48ca-b30b-a01411743bd7 req-97f09bac-a8e7-4789-840f-5cd1695c2730 service nova] Releasing lock "refresh_cache-a513976b-4859-4822-8989-c9452db62ee6" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.780965] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]525be734-fb8b-cbf3-2757-ba34e1fa735c, 'name': SearchDatastore_Task, 'duration_secs': 0.009726} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.780965] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 873.781210] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] a513976b-4859-4822-8989-c9452db62ee6/a513976b-4859-4822-8989-c9452db62ee6.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 873.781407] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-6729248d-a1e9-4c86-bdc6-20a8a9d37ed1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.788184] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 873.788184] env[68217]: value = "task-2961393" [ 873.788184] env[68217]: _type = "Task" [ 873.788184] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.796251] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961393, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.842110] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.607s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 873.844596] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.912s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 873.846355] env[68217]: INFO nova.compute.claims [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 873.888362] env[68217]: INFO nova.network.neutron [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Updating port 686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 874.055787] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "refresh_cache-fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.055787] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquired lock "refresh_cache-fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 874.055787] env[68217]: DEBUG nova.network.neutron [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 874.284302] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "2612f6fc-a43f-4011-8a09-51088a49371a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 874.284302] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "2612f6fc-a43f-4011-8a09-51088a49371a-events" acquired by
"nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 874.284673] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "2612f6fc-a43f-4011-8a09-51088a49371a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 874.298750] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961393, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.506366} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.301586] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] a513976b-4859-4822-8989-c9452db62ee6/a513976b-4859-4822-8989-c9452db62ee6.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 874.301586] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 874.301586] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b49f7cf4-1d83-4615-a366-83a919445f46 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.308790] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 874.308790] env[68217]: value = "task-2961394" [ 874.308790] env[68217]: _type = "Task" [ 874.308790] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.318517] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961394, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.590344] env[68217]: DEBUG nova.network.neutron [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Instance cache missing network info.
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 874.820672] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961394, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088142} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.821986] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 874.822966] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48eb333d-e2cb-4d9a-9f8b-ab323d046ff7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.828614] env[68217]: DEBUG nova.network.neutron [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Updating instance_info_cache with network_info: [{"id": "747300c0-a758-483f-ba39-99efe6e731ec", "address": "fa:16:3e:2a:04:4a", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap747300c0-a7", "ovs_interfaceid": "747300c0-a758-483f-ba39-99efe6e731ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.850356] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] a513976b-4859-4822-8989-c9452db62ee6/a513976b-4859-4822-8989-c9452db62ee6.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 874.852192] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-264e3189-0324-410c-afff-9a2779002a2f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.880630] env[68217]: DEBUG oslo_vmware.api [None 
req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 874.880630] env[68217]: value = "task-2961395" [ 874.880630] env[68217]: _type = "Task" [ 874.880630] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.889964] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961395, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.320918] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52bc2d88-b1a6-4216-a66e-477796d05aab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.332638] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8373b5c9-884d-431f-9a8c-f7bb5b81472e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.366538] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Releasing lock "refresh_cache-fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 875.366978] env[68217]: DEBUG nova.compute.manager [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Instance network_info: |[{"id": "747300c0-a758-483f-ba39-99efe6e731ec", "address": "fa:16:3e:2a:04:4a", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap747300c0-a7", "ovs_interfaceid": "747300c0-a758-483f-ba39-99efe6e731ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 875.368340] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "refresh_cache-2612f6fc-a43f-4011-8a09-51088a49371a" {{(pid=68217) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.368561] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "refresh_cache-2612f6fc-a43f-4011-8a09-51088a49371a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.368741] env[68217]: DEBUG nova.network.neutron [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 875.370277] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2a:04:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a7188cb6-c9d7-449a-9362-ddb61a31feaf', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '747300c0-a758-483f-ba39-99efe6e731ec', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 875.379031] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 875.379194] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b3f53a3-2121-4190-a7bc-7b3400ccbae7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.382712] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 875.383013] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-687a8e88-d970-45a6-9a0a-31130e036279 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.424276] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961395, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.427668] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 875.427668] env[68217]: value = "task-2961396" [ 875.427668] env[68217]: _type = "Task" [ 875.427668] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.429073] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4ecf80f-aed8-442c-9e38-e3f99ca77e6d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.442285] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961396, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.450920] env[68217]: DEBUG nova.compute.provider_tree [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 875.455278] env[68217]: DEBUG nova.compute.manager [req-6af2d060-7114-493b-8947-0effbf4075ed req-ca36e7bf-179f-47db-a375-95b194c718e7 service nova] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Received event network-changed-747300c0-a758-483f-ba39-99efe6e731ec {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 875.455473] env[68217]: DEBUG nova.compute.manager [req-6af2d060-7114-493b-8947-0effbf4075ed req-ca36e7bf-179f-47db-a375-95b194c718e7 service nova] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Refreshing instance network info cache due to event network-changed-747300c0-a758-483f-ba39-99efe6e731ec. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 875.455690] env[68217]: DEBUG oslo_concurrency.lockutils [req-6af2d060-7114-493b-8947-0effbf4075ed req-ca36e7bf-179f-47db-a375-95b194c718e7 service nova] Acquiring lock "refresh_cache-fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.455828] env[68217]: DEBUG oslo_concurrency.lockutils [req-6af2d060-7114-493b-8947-0effbf4075ed req-ca36e7bf-179f-47db-a375-95b194c718e7 service nova] Acquired lock "refresh_cache-fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.455984] env[68217]: DEBUG nova.network.neutron [req-6af2d060-7114-493b-8947-0effbf4075ed req-ca36e7bf-179f-47db-a375-95b194c718e7 service nova] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Refreshing network info cache for port 747300c0-a758-483f-ba39-99efe6e731ec {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 875.506979] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.507289] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquired lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.507522] env[68217]: DEBUG nova.network.neutron [None 
req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 875.897913] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961395, 'name': ReconfigVM_Task, 'duration_secs': 1.016562} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.898235] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Reconfigured VM instance instance-00000049 to attach disk [datastore2] a513976b-4859-4822-8989-c9452db62ee6/a513976b-4859-4822-8989-c9452db62ee6.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 875.898955] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6d8368a-f649-41a3-97cb-86bb56b01711 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.907257] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 875.907257] env[68217]: value = "task-2961397" [ 875.907257] env[68217]: _type = "Task" [ 875.907257] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.916976] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961397, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.940793] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961396, 'name': CreateVM_Task, 'duration_secs': 0.397047} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.940980] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 875.941733] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.941893] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 875.942277] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 875.942560] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1853977-d1be-4926-9d0d-f692c8624330 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.949531] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 875.949531] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d487eb-0057-99e0-949b-4db863763fd1" [ 875.949531] env[68217]: _type = "Task" [ 875.949531] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.961876] env[68217]: DEBUG nova.scheduler.client.report [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 875.967899] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d487eb-0057-99e0-949b-4db863763fd1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.195449] env[68217]: DEBUG nova.network.neutron [req-6af2d060-7114-493b-8947-0effbf4075ed req-ca36e7bf-179f-47db-a375-95b194c718e7 service nova] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Updated VIF entry in instance network info cache for port 747300c0-a758-483f-ba39-99efe6e731ec. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 876.195888] env[68217]: DEBUG nova.network.neutron [req-6af2d060-7114-493b-8947-0effbf4075ed req-ca36e7bf-179f-47db-a375-95b194c718e7 service nova] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Updating instance_info_cache with network_info: [{"id": "747300c0-a758-483f-ba39-99efe6e731ec", "address": "fa:16:3e:2a:04:4a", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap747300c0-a7", "ovs_interfaceid": "747300c0-a758-483f-ba39-99efe6e731ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.294608] env[68217]: DEBUG nova.network.neutron [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Updating instance_info_cache with network_info: [{"id": "49bc7718-8633-456d-b4d1-6bcc8493670b", "address": "fa:16:3e:8b:bf:69", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49bc7718-86", "ovs_interfaceid": "49bc7718-8633-456d-b4d1-6bcc8493670b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.394859] 
env[68217]: DEBUG nova.network.neutron [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Updating instance_info_cache with network_info: [{"id": "686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0", "address": "fa:16:3e:2c:18:00", "network": {"id": "2456ed77-d69f-4430-b649-cebfb2e6e5c6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-259900287-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "023b801c234d47d79cb57ea73058e81c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap686a0657-d9", "ovs_interfaceid": "686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 876.424673] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961397, 'name': Rename_Task, 'duration_secs': 0.201298} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.424961] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 876.425266] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a37408e8-39b1-45a1-8fe5-5ce0558c2181 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.434590] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 876.434590] env[68217]: value = "task-2961398" [ 876.434590] env[68217]: _type = "Task" [ 876.434590] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.443337] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961398, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.459675] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d487eb-0057-99e0-949b-4db863763fd1, 'name': SearchDatastore_Task, 'duration_secs': 0.013541} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.460065] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.460289] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 876.460524] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 876.460671] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 876.460849] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 876.461124] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f57dcda7-358a-4124-bb03-d3985123ca01 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.470251] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.626s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.470832] env[68217]: DEBUG nova.compute.manager [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 876.474892] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.575s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.476437] env[68217]: INFO nova.compute.claims [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 876.479210] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 876.479391] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 876.480665] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-08b7bee3-b2bf-4482-bc4a-602d74447806 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.488919] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 876.488919] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522b24cb-8e01-6822-09b9-1bbe2b870e3a" [ 876.488919] env[68217]: _type = "Task" [ 876.488919] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.498675] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522b24cb-8e01-6822-09b9-1bbe2b870e3a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.703067] env[68217]: DEBUG oslo_concurrency.lockutils [req-6af2d060-7114-493b-8947-0effbf4075ed req-ca36e7bf-179f-47db-a375-95b194c718e7 service nova] Releasing lock "refresh_cache-fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.703067] env[68217]: DEBUG nova.compute.manager [req-6af2d060-7114-493b-8947-0effbf4075ed req-ca36e7bf-179f-47db-a375-95b194c718e7 service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Received event network-vif-plugged-686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 876.703067] env[68217]: DEBUG oslo_concurrency.lockutils [req-6af2d060-7114-493b-8947-0effbf4075ed req-ca36e7bf-179f-47db-a375-95b194c718e7 service nova] Acquiring lock "58c15727-79ae-404f-a054-d71e3be498cc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 876.703067] env[68217]: DEBUG oslo_concurrency.lockutils [req-6af2d060-7114-493b-8947-0effbf4075ed req-ca36e7bf-179f-47db-a375-95b194c718e7 service nova] Lock "58c15727-79ae-404f-a054-d71e3be498cc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 876.703067] env[68217]: DEBUG oslo_concurrency.lockutils [req-6af2d060-7114-493b-8947-0effbf4075ed req-ca36e7bf-179f-47db-a375-95b194c718e7 service nova] Lock "58c15727-79ae-404f-a054-d71e3be498cc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 876.703381] env[68217]: DEBUG nova.compute.manager [req-6af2d060-7114-493b-8947-0effbf4075ed req-ca36e7bf-179f-47db-a375-95b194c718e7 service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] No waiting events found dispatching network-vif-plugged-686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 876.703381] env[68217]: WARNING nova.compute.manager [req-6af2d060-7114-493b-8947-0effbf4075ed req-ca36e7bf-179f-47db-a375-95b194c718e7 service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Received unexpected event network-vif-plugged-686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 for instance with vm_state shelved_offloaded and task_state spawning.
[ 876.797811] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "refresh_cache-2612f6fc-a43f-4011-8a09-51088a49371a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.897775] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Releasing lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 876.948092] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961398, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.982540] env[68217]: DEBUG nova.compute.utils [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 876.986865] env[68217]: DEBUG nova.compute.manager [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 876.987069] env[68217]: DEBUG nova.network.neutron [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 876.999369] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522b24cb-8e01-6822-09b9-1bbe2b870e3a, 'name': SearchDatastore_Task, 'duration_secs': 0.011845} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.001168] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-400f24d6-0dcb-478c-81e8-398ce95738e9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.008620] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 877.008620] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ba8a09-dfc1-9b08-61a9-019c9e7272d9" [ 877.008620] env[68217]: _type = "Task" [ 877.008620] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.020030] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ba8a09-dfc1-9b08-61a9-019c9e7272d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.050077] env[68217]: DEBUG nova.policy [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '011d38e070744a3fb3c515d5e669ed22', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c9ba7843e6144cd1877b48bc40cd64f3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 877.112423] env[68217]: DEBUG nova.virt.hardware [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='1599ea10a2f7d375a62cb6aecb14f745',container_format='bare',created_at=2025-03-12T08:19:28Z,direct_url=<?>,disk_format='vmdk',id=31c3d1c5-dcbd-447b-935c-0ac48e805003,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-409473508-shelved',owner='023b801c234d47d79cb57ea73058e81c',properties=ImageMetaProps,protected=<?>,size=31665664,status='active',tags=<?>,updated_at=2025-03-12T08:19:46Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 877.112683] env[68217]: DEBUG nova.virt.hardware [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 877.112892] env[68217]: DEBUG nova.virt.hardware [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 877.113596] env[68217]: DEBUG nova.virt.hardware [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 877.113991] env[68217]: DEBUG nova.virt.hardware [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints
/opt/stack/nova/nova/virt/hardware.py:396}} [ 877.114185] env[68217]: DEBUG nova.virt.hardware [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 877.114398] env[68217]: DEBUG nova.virt.hardware [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 877.114561] env[68217]: DEBUG nova.virt.hardware [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 877.114726] env[68217]: DEBUG nova.virt.hardware [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 877.114881] env[68217]: DEBUG nova.virt.hardware [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 877.115065] env[68217]: DEBUG nova.virt.hardware [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 877.117974] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5e7521-8b95-44c0-ad43-b677bea594dd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.129997] env[68217]: DEBUG oslo_vmware.rw_handles [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e92d9e-e87e-1c0e-35fc-7a8b1e1ef5c8/disk-0.vmdk. 
{{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 877.131331] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2791887f-0a31-4daa-b8ef-594bced643c9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.136158] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c46edd1e-3ac6-43d8-a9ca-18fa75c82603 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.154315] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2c:18:00', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'e0c77754-4085-434b-a3e8-d61be099ac67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 877.162443] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 877.165401] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 877.167029] env[68217]: DEBUG oslo_vmware.rw_handles [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e92d9e-e87e-1c0e-35fc-7a8b1e1ef5c8/disk-0.vmdk is in state: ready. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 877.167029] env[68217]: ERROR oslo_vmware.rw_handles [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e92d9e-e87e-1c0e-35fc-7a8b1e1ef5c8/disk-0.vmdk due to incomplete transfer. [ 877.167029] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b8f23c54-bd2d-4980-a017-c01bf8f50ef7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.187045] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-bfee794c-729f-4bc8-8f8b-610203fb69f3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.196429] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 877.196429] env[68217]: value = "task-2961399" [ 877.196429] env[68217]: _type = "Task" [ 877.196429] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.197833] env[68217]: DEBUG oslo_vmware.rw_handles [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52e92d9e-e87e-1c0e-35fc-7a8b1e1ef5c8/disk-0.vmdk. {{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 877.198072] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Uploaded image a1eef9f2-a38c-4f50-9404-678c6d3c131a to the Glance image server {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 877.200874] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 877.205180] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-025df945-8087-464a-b1f1-242dd8a9a752 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.213679] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961399, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.215543] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 877.215543] env[68217]: value = "task-2961400" [ 877.215543] env[68217]: _type = "Task" [ 877.215543] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.225598] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961400, 'name': Destroy_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.325655] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1021a0b4-28ba-4bbd-82ec-a6135bdec9a8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.347265] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bade3767-41bd-4d26-9d63-a3b5890094c6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.355863] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Updating instance '2612f6fc-a43f-4011-8a09-51088a49371a' progress to 83 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 877.447634] env[68217]: DEBUG oslo_vmware.api [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961398, 'name': PowerOnVM_Task, 'duration_secs': 0.562179} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.447949] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 877.448153] env[68217]: INFO nova.compute.manager [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Took 7.79 seconds to spawn the instance on the hypervisor. [ 877.448367] env[68217]: DEBUG nova.compute.manager [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 877.449465] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bbf8abd-3b84-40e5-bce9-0cd728e5ec97 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.486561] env[68217]: DEBUG nova.compute.manager [req-e964fb77-ae2d-45c0-862e-9abb65b5cd05 req-ad810069-1869-4b5e-b7a4-946422e70290 service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Received event network-changed-686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 877.486806] env[68217]: DEBUG nova.compute.manager [req-e964fb77-ae2d-45c0-862e-9abb65b5cd05 req-ad810069-1869-4b5e-b7a4-946422e70290 service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Refreshing instance network info cache due to event network-changed-686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 877.487231] env[68217]: DEBUG oslo_concurrency.lockutils [req-e964fb77-ae2d-45c0-862e-9abb65b5cd05 req-ad810069-1869-4b5e-b7a4-946422e70290 service nova] Acquiring lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.487440] env[68217]: DEBUG oslo_concurrency.lockutils [req-e964fb77-ae2d-45c0-862e-9abb65b5cd05 req-ad810069-1869-4b5e-b7a4-946422e70290 service nova] Acquired lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 877.487649] env[68217]: DEBUG nova.network.neutron [req-e964fb77-ae2d-45c0-862e-9abb65b5cd05 req-ad810069-1869-4b5e-b7a4-946422e70290 service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Refreshing network info cache for port 686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 877.489445] env[68217]: DEBUG nova.compute.manager [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 877.525181] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ba8a09-dfc1-9b08-61a9-019c9e7272d9, 'name': SearchDatastore_Task, 'duration_secs': 0.011228} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.525708] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 877.526192] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb/fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 877.526756] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b261c0fb-3fd5-4f6b-8ffc-311aa2d7dc63 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.535745] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 877.535745] env[68217]: value = "task-2961401" [ 877.535745] env[68217]: _type = "Task" [ 877.535745] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.545805] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961401, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.565784] env[68217]: DEBUG nova.network.neutron [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Successfully created port: 5002fb09-ddc6-4497-a55f-8cfe415c4d70 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 877.713483] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961399, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.729998] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961400, 'name': Destroy_Task} progress is 33%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.866283] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 877.866418] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-108fe568-2ab4-4d17-a058-c7de52f4e97a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.876624] env[68217]: DEBUG oslo_vmware.api [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 877.876624] env[68217]: value = "task-2961402" [ 877.876624] env[68217]: _type = "Task" [ 877.876624] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.890084] env[68217]: DEBUG oslo_vmware.api [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961402, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.980450] env[68217]: INFO nova.compute.manager [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Took 17.75 seconds to build instance. [ 878.048208] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961401, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.509155} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.049546] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb/fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 878.049776] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 878.050576] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb1557c-0904-4c7a-aa56-6ddc905396e2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.057681] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1142be7e-6a58-4c95-850f-546fab767ed0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.064236] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 878.064236] env[68217]: value = "task-2961403" [ 878.064236] env[68217]: _type = "Task" [ 878.064236] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.065729] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7434b45f-0522-4651-a304-20943b513198 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.079731] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961403, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.108925] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ec19b55-eb8f-4ce4-8cfd-a276e90718f8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.118199] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8e1bab6-84b1-4b16-8fde-3f947066c7b2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.133105] env[68217]: DEBUG nova.compute.provider_tree [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 878.211025] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961399, 'name': CreateVM_Task, 'duration_secs': 0.543526} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.214155] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 878.214805] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/31c3d1c5-dcbd-447b-935c-0ac48e805003" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.215134] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquired lock "[datastore2] devstack-image-cache_base/31c3d1c5-dcbd-447b-935c-0ac48e805003" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.215514] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/31c3d1c5-dcbd-447b-935c-0ac48e805003" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 878.215924] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-032afad6-c1e3-4eca-8469-5c73b7737487 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.224649] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 878.224649] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52445280-6fe8-f48e-c0c9-2ec714f1f3a6" [ 878.224649] env[68217]: _type = "Task" [ 878.224649] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.227965] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961400, 'name': Destroy_Task, 'duration_secs': 0.618706} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.231283] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Destroyed the VM [ 878.231622] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 878.231832] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-b0cc964a-a114-45f5-9203-652a54f00db1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.239930] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52445280-6fe8-f48e-c0c9-2ec714f1f3a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.241914] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 878.241914] env[68217]: value = "task-2961404" [ 878.241914] env[68217]: _type = "Task" [ 878.241914] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.250503] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961404, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.340652] env[68217]: DEBUG nova.network.neutron [req-e964fb77-ae2d-45c0-862e-9abb65b5cd05 req-ad810069-1869-4b5e-b7a4-946422e70290 service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Updated VIF entry in instance network info cache for port 686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 878.341140] env[68217]: DEBUG nova.network.neutron [req-e964fb77-ae2d-45c0-862e-9abb65b5cd05 req-ad810069-1869-4b5e-b7a4-946422e70290 service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Updating instance_info_cache with network_info: [{"id": "686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0", "address": "fa:16:3e:2c:18:00", "network": {"id": "2456ed77-d69f-4430-b649-cebfb2e6e5c6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-259900287-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "023b801c234d47d79cb57ea73058e81c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap686a0657-d9", "ovs_interfaceid": "686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 878.389729] env[68217]: DEBUG oslo_vmware.api [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961402, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.487175] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8e87d4b7-d21f-4c31-bee0-b805231a546d tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "a513976b-4859-4822-8989-c9452db62ee6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.273s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 878.504660] env[68217]: DEBUG nova.compute.manager [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 878.536677] env[68217]: DEBUG nova.virt.hardware [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 878.537156] env[68217]: DEBUG nova.virt.hardware [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 878.537156] env[68217]: DEBUG nova.virt.hardware [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 878.537796] env[68217]: DEBUG nova.virt.hardware [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 878.537796] env[68217]: DEBUG nova.virt.hardware [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 878.537796] env[68217]: DEBUG nova.virt.hardware [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 878.537965] env[68217]: DEBUG nova.virt.hardware [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 878.537999] env[68217]: DEBUG nova.virt.hardware [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 878.538174] env[68217]: DEBUG nova.virt.hardware [None req-9e5887c5-4d06-4458-9d66-708d079312d2 
tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 878.538339] env[68217]: DEBUG nova.virt.hardware [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 878.538510] env[68217]: DEBUG nova.virt.hardware [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 878.539453] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dabc5bb7-a618-4f7b-ab2a-9516fd7c9ab6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.548922] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2cf1567-af0f-4f53-a540-0b6f49b75ddf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.575446] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961403, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.08525} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.575811] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 878.576661] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f158aa5d-ef58-4fad-8004-67917ccab50a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.601324] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb/fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 878.601677] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a5763ab-727c-4e21-b9da-ce836328be15 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.626680] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 878.626680] env[68217]: value = "task-2961405" [ 878.626680] 
env[68217]: _type = "Task" [ 878.626680] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.636820] env[68217]: DEBUG nova.scheduler.client.report [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 878.645468] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961405, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.739931] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Releasing lock "[datastore2] devstack-image-cache_base/31c3d1c5-dcbd-447b-935c-0ac48e805003" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.740262] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Processing image 31c3d1c5-dcbd-447b-935c-0ac48e805003 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 878.740552] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/31c3d1c5-dcbd-447b-935c-0ac48e805003/31c3d1c5-dcbd-447b-935c-0ac48e805003.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 878.740741] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquired lock "[datastore2] devstack-image-cache_base/31c3d1c5-dcbd-447b-935c-0ac48e805003/31c3d1c5-dcbd-447b-935c-0ac48e805003.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 878.740964] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 878.741281] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-996069ed-0dca-48bb-a59f-3a230ff7a9b0 {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.752901] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961404, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.754980] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 878.755181] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 878.755957] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45d0bc9f-b594-4fd1-80c0-702fed15af62 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.761616] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 878.761616] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]525ead40-e22f-2008-0c6f-511c6319b78a" [ 878.761616] env[68217]: _type = "Task" [ 878.761616] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.773839] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]525ead40-e22f-2008-0c6f-511c6319b78a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.846035] env[68217]: DEBUG oslo_concurrency.lockutils [req-e964fb77-ae2d-45c0-862e-9abb65b5cd05 req-ad810069-1869-4b5e-b7a4-946422e70290 service nova] Releasing lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 878.888575] env[68217]: DEBUG oslo_vmware.api [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961402, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.137605] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961405, 'name': ReconfigVM_Task, 'duration_secs': 0.402177} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.137891] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Reconfigured VM instance instance-0000004a to attach disk [datastore2] fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb/fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 879.138538] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-475fac8a-7ddb-453f-aa56-c8983da9c191 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.147734] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.673s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.148427] env[68217]: DEBUG nova.compute.manager [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 879.153447] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.622s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.153922] env[68217]: DEBUG nova.objects.instance [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lazy-loading 'resources' on Instance uuid 09290e60-7751-408e-9d6d-20e7cb61767b {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 879.155431] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 879.155431] env[68217]: value = "task-2961406" [ 879.155431] env[68217]: _type = "Task" [ 879.155431] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.156302] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "a513976b-4859-4822-8989-c9452db62ee6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.156632] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "a513976b-4859-4822-8989-c9452db62ee6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.156948] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "a513976b-4859-4822-8989-c9452db62ee6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.161288] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "a513976b-4859-4822-8989-c9452db62ee6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.004s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.161505] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "a513976b-4859-4822-8989-c9452db62ee6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.169249] env[68217]: INFO nova.compute.manager [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Terminating instance [ 879.181938] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961406, 'name': Rename_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.258590] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961404, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.272260] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Preparing fetch location {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 879.272583] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Fetch image to [datastore2] OSTACK_IMG_f5200c02-3c63-49f6-89db-1072b8c0fad9/OSTACK_IMG_f5200c02-3c63-49f6-89db-1072b8c0fad9.vmdk {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 879.272817] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Downloading stream optimized image 31c3d1c5-dcbd-447b-935c-0ac48e805003 to [datastore2] OSTACK_IMG_f5200c02-3c63-49f6-89db-1072b8c0fad9/OSTACK_IMG_f5200c02-3c63-49f6-89db-1072b8c0fad9.vmdk on the data store datastore2 as vApp {{(pid=68217) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 879.273065] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Downloading image file data 31c3d1c5-dcbd-447b-935c-0ac48e805003 to the ESX as VM named 'OSTACK_IMG_f5200c02-3c63-49f6-89db-1072b8c0fad9' {{(pid=68217) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 879.355107] env[68217]: DEBUG oslo_vmware.rw_handles [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 879.355107] env[68217]: value = "resgroup-9" [ 879.355107] env[68217]: _type = "ResourcePool" [ 879.355107] env[68217]: }. {{(pid=68217) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 879.355445] env[68217]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-4f00772a-565f-4996-9063-0f42daeaab81 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.385176] env[68217]: DEBUG oslo_vmware.rw_handles [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lease: (returnval){ [ 879.385176] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ac9c3e-cbdf-6568-bf70-d65fe0d928ae" [ 879.385176] env[68217]: _type = "HttpNfcLease" [ 879.385176] env[68217]: } obtained for vApp import into resource pool (val){ [ 879.385176] env[68217]: value = "resgroup-9" [ 879.385176] env[68217]: _type = "ResourcePool" [ 879.385176] env[68217]: }. 
{{(pid=68217) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 879.385455] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the lease: (returnval){ [ 879.385455] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ac9c3e-cbdf-6568-bf70-d65fe0d928ae" [ 879.385455] env[68217]: _type = "HttpNfcLease" [ 879.385455] env[68217]: } to be ready. {{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 879.393740] env[68217]: DEBUG oslo_vmware.api [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961402, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.398653] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 879.398653] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ac9c3e-cbdf-6568-bf70-d65fe0d928ae" [ 879.398653] env[68217]: _type = "HttpNfcLease" [ 879.398653] env[68217]: } is initializing. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 879.413885] env[68217]: DEBUG nova.network.neutron [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Successfully updated port: 5002fb09-ddc6-4497-a55f-8cfe415c4d70 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 879.517659] env[68217]: DEBUG nova.compute.manager [req-85e99ea0-6ce5-42ae-a760-5c99cdc63195 req-caa3fefb-de50-4114-9f8b-295b3c100b83 service nova] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Received event network-vif-plugged-5002fb09-ddc6-4497-a55f-8cfe415c4d70 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 879.517978] env[68217]: DEBUG oslo_concurrency.lockutils [req-85e99ea0-6ce5-42ae-a760-5c99cdc63195 req-caa3fefb-de50-4114-9f8b-295b3c100b83 service nova] Acquiring lock "ca9ef7ff-b942-4363-a4f8-9163791ec162-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.520701] env[68217]: DEBUG oslo_concurrency.lockutils [req-85e99ea0-6ce5-42ae-a760-5c99cdc63195 req-caa3fefb-de50-4114-9f8b-295b3c100b83 service nova] Lock "ca9ef7ff-b942-4363-a4f8-9163791ec162-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.520931] env[68217]: DEBUG oslo_concurrency.lockutils [req-85e99ea0-6ce5-42ae-a760-5c99cdc63195 req-caa3fefb-de50-4114-9f8b-295b3c100b83 service nova] Lock "ca9ef7ff-b942-4363-a4f8-9163791ec162-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.003s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 879.521175] env[68217]: DEBUG nova.compute.manager [req-85e99ea0-6ce5-42ae-a760-5c99cdc63195 req-caa3fefb-de50-4114-9f8b-295b3c100b83 service nova] [instance: 
ca9ef7ff-b942-4363-a4f8-9163791ec162] No waiting events found dispatching network-vif-plugged-5002fb09-ddc6-4497-a55f-8cfe415c4d70 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 879.521367] env[68217]: WARNING nova.compute.manager [req-85e99ea0-6ce5-42ae-a760-5c99cdc63195 req-caa3fefb-de50-4114-9f8b-295b3c100b83 service nova] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Received unexpected event network-vif-plugged-5002fb09-ddc6-4497-a55f-8cfe415c4d70 for instance with vm_state building and task_state spawning. [ 879.521575] env[68217]: DEBUG nova.compute.manager [req-85e99ea0-6ce5-42ae-a760-5c99cdc63195 req-caa3fefb-de50-4114-9f8b-295b3c100b83 service nova] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Received event network-changed-5002fb09-ddc6-4497-a55f-8cfe415c4d70 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 879.521743] env[68217]: DEBUG nova.compute.manager [req-85e99ea0-6ce5-42ae-a760-5c99cdc63195 req-caa3fefb-de50-4114-9f8b-295b3c100b83 service nova] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Refreshing instance network info cache due to event network-changed-5002fb09-ddc6-4497-a55f-8cfe415c4d70. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 879.521967] env[68217]: DEBUG oslo_concurrency.lockutils [req-85e99ea0-6ce5-42ae-a760-5c99cdc63195 req-caa3fefb-de50-4114-9f8b-295b3c100b83 service nova] Acquiring lock "refresh_cache-ca9ef7ff-b942-4363-a4f8-9163791ec162" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.522160] env[68217]: DEBUG oslo_concurrency.lockutils [req-85e99ea0-6ce5-42ae-a760-5c99cdc63195 req-caa3fefb-de50-4114-9f8b-295b3c100b83 service nova] Acquired lock "refresh_cache-ca9ef7ff-b942-4363-a4f8-9163791ec162" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 879.522327] env[68217]: DEBUG nova.network.neutron [req-85e99ea0-6ce5-42ae-a760-5c99cdc63195 req-caa3fefb-de50-4114-9f8b-295b3c100b83 service nova] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Refreshing network info cache for port 5002fb09-ddc6-4497-a55f-8cfe415c4d70 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 879.644585] env[68217]: DEBUG oslo_concurrency.lockutils [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "da1524a7-2756-4429-ada2-b1f493544bd2" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 879.644836] env[68217]: DEBUG oslo_concurrency.lockutils [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "da1524a7-2756-4429-ada2-b1f493544bd2" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 879.660093] env[68217]: DEBUG nova.compute.utils [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 879.662447] env[68217]: DEBUG 
nova.compute.manager [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 879.662447] env[68217]: DEBUG nova.network.neutron [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 879.675799] env[68217]: DEBUG nova.compute.manager [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 879.676117] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 879.676464] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961406, 'name': Rename_Task, 'duration_secs': 0.256284} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.677957] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4212d083-12ac-4274-a0f3-2b3784acfb6d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.681069] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 879.682270] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-015b1adc-087b-47e3-8e39-d1dfc3973ca7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.690491] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 879.690981] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df838569-0a14-460a-a6bf-58188886bb20 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.694351] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting 
for the task: (returnval){ [ 879.694351] env[68217]: value = "task-2961408" [ 879.694351] env[68217]: _type = "Task" [ 879.694351] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.703968] env[68217]: DEBUG oslo_vmware.api [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 879.703968] env[68217]: value = "task-2961409" [ 879.703968] env[68217]: _type = "Task" [ 879.703968] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.711564] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961408, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.718606] env[68217]: DEBUG oslo_vmware.api [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961409, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.727065] env[68217]: DEBUG nova.policy [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fd08981ea724019826d597a1c8b4ecd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d6233e9874c41329f81c990f8bc72b1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 879.757772] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961404, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.889818] env[68217]: DEBUG oslo_vmware.api [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961402, 'name': PowerOnVM_Task, 'duration_secs': 1.69769} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.894622] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 879.894927] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1a810568-1301-4476-bedf-6769521e4895 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Updating instance '2612f6fc-a43f-4011-8a09-51088a49371a' progress to 100 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 879.911768] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 879.911768] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ac9c3e-cbdf-6568-bf70-d65fe0d928ae" [ 879.911768] env[68217]: _type = "HttpNfcLease" [ 879.911768] env[68217]: } is initializing. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 879.917500] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "refresh_cache-ca9ef7ff-b942-4363-a4f8-9163791ec162" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 880.092444] env[68217]: DEBUG nova.network.neutron [req-85e99ea0-6ce5-42ae-a760-5c99cdc63195 req-caa3fefb-de50-4114-9f8b-295b3c100b83 service nova] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 880.148212] env[68217]: DEBUG nova.compute.utils [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 880.168153] env[68217]: DEBUG nova.compute.manager [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 880.187997] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b186e14b-694e-4b28-b7fd-af69a1f1ec56 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.199815] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0401546-c457-4854-92d2-a5bbe5b29394 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.216338] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961408, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.244848] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a22b6b2d-bf89-4265-b401-cf592a64e496 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.251197] env[68217]: DEBUG oslo_vmware.api [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961409, 'name': PowerOffVM_Task, 'duration_secs': 0.270354} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.252394] env[68217]: DEBUG nova.network.neutron [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Successfully created port: 4f18a3c9-df30-45b6-9176-4c99f80d5bad {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 880.257609] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 880.257798] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 880.258107] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1cdf2ddb-3e1e-454d-9430-134337b345e1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.260535] env[68217]: DEBUG nova.network.neutron [req-85e99ea0-6ce5-42ae-a760-5c99cdc63195 req-caa3fefb-de50-4114-9f8b-295b3c100b83 service nova] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 880.269231] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec1f638d-0c52-413c-8594-02f97cb6dfff {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.274476] env[68217]: DEBUG oslo_vmware.api [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961404, 'name': RemoveSnapshot_Task, 'duration_secs': 1.557408} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.275652] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 880.276032] env[68217]: INFO nova.compute.manager [None req-823e7427-d0bb-41dc-b43f-0c0dbc8a38d8 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Took 15.70 seconds to snapshot the instance on the hypervisor. [ 880.290568] env[68217]: DEBUG nova.compute.provider_tree [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 880.342321] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 880.342628] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 880.342825] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Deleting the datastore file [datastore2] a513976b-4859-4822-8989-c9452db62ee6 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 880.343207] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39a19cea-9408-420f-be6a-7913d1edffff {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.352949] env[68217]: DEBUG oslo_vmware.api [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for the task: (returnval){ [ 880.352949] env[68217]: value = "task-2961411" [ 880.352949] env[68217]: _type = "Task" [ 880.352949] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.361659] env[68217]: DEBUG oslo_vmware.api [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961411, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.399131] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 880.399131] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ac9c3e-cbdf-6568-bf70-d65fe0d928ae" [ 880.399131] env[68217]: _type = "HttpNfcLease" [ 880.399131] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 880.399474] env[68217]: DEBUG oslo_vmware.rw_handles [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 880.399474] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ac9c3e-cbdf-6568-bf70-d65fe0d928ae" [ 880.399474] env[68217]: _type = "HttpNfcLease" [ 880.399474] env[68217]: }. {{(pid=68217) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 880.400296] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a9d2394-a997-4b38-8f0f-94fe3606b3f6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.422847] env[68217]: DEBUG oslo_vmware.rw_handles [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521b2cac-f7da-f579-1331-05fb6ba4d723/disk-0.vmdk from lease info. {{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 880.423024] env[68217]: DEBUG oslo_vmware.rw_handles [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Creating HTTP connection to write to file with size = 31665664 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521b2cac-f7da-f579-1331-05fb6ba4d723/disk-0.vmdk. {{(pid=68217) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 880.490024] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-3753bf21-106b-44ca-8254-163fe3062e0c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.651753] env[68217]: DEBUG oslo_concurrency.lockutils [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "da1524a7-2756-4429-ada2-b1f493544bd2" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 880.712350] env[68217]: DEBUG oslo_vmware.api [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961408, 'name': PowerOnVM_Task, 'duration_secs': 0.582354} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.714906] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 880.715407] env[68217]: INFO nova.compute.manager [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Took 8.45 seconds to spawn the instance on the hypervisor. [ 880.715460] env[68217]: DEBUG nova.compute.manager [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 880.716474] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15922774-2524-4810-9579-f7d62019d618 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.765260] env[68217]: DEBUG oslo_concurrency.lockutils [req-85e99ea0-6ce5-42ae-a760-5c99cdc63195 req-caa3fefb-de50-4114-9f8b-295b3c100b83 service nova] Releasing lock "refresh_cache-ca9ef7ff-b942-4363-a4f8-9163791ec162" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 880.767173] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired lock "refresh_cache-ca9ef7ff-b942-4363-a4f8-9163791ec162" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 880.767321] env[68217]: DEBUG nova.network.neutron [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 880.794162] env[68217]: DEBUG nova.scheduler.client.report [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 880.866778] env[68217]: DEBUG oslo_vmware.api [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Task: {'id': task-2961411, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157392} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.868352] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 880.868594] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 880.868818] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 880.869086] env[68217]: INFO nova.compute.manager [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] [instance: a513976b-4859-4822-8989-c9452db62ee6] Took 1.19 seconds to destroy the instance on the hypervisor. [ 880.869394] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 880.869638] env[68217]: DEBUG nova.compute.manager [-] [instance: a513976b-4859-4822-8989-c9452db62ee6] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 880.869785] env[68217]: DEBUG nova.network.neutron [-] [instance: a513976b-4859-4822-8989-c9452db62ee6] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 881.178566] env[68217]: DEBUG nova.compute.manager [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 881.204467] env[68217]: DEBUG nova.virt.hardware [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 881.204824] env[68217]: DEBUG nova.virt.hardware [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 881.204998] env[68217]: DEBUG nova.virt.hardware [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 881.205203] env[68217]: DEBUG nova.virt.hardware [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 881.205356] env[68217]: DEBUG nova.virt.hardware [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 881.205499] env[68217]: DEBUG nova.virt.hardware [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 881.205702] env[68217]: DEBUG nova.virt.hardware [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 881.205863] env[68217]: DEBUG nova.virt.hardware [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 881.206042] env[68217]: DEBUG nova.virt.hardware [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] 
Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 881.206218] env[68217]: DEBUG nova.virt.hardware [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 881.206396] env[68217]: DEBUG nova.virt.hardware [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 881.207552] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b223a517-15f5-4029-ace4-690293728efd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.216399] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b9be070-12cf-436e-8f13-c95ae05c674e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.241466] env[68217]: INFO nova.compute.manager [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Took 20.10 seconds to build instance. [ 881.299879] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.146s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.304150] env[68217]: DEBUG oslo_concurrency.lockutils [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.742s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.304150] env[68217]: DEBUG nova.objects.instance [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Lazy-loading 'resources' on Instance uuid 62628aed-e2f9-478f-bed7-00757fc3c484 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 881.308463] env[68217]: DEBUG nova.network.neutron [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 881.335480] env[68217]: INFO nova.scheduler.client.report [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Deleted allocations for instance 09290e60-7751-408e-9d6d-20e7cb61767b [ 881.610066] env[68217]: DEBUG nova.network.neutron [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Updating instance_info_cache with network_info: [{"id": "5002fb09-ddc6-4497-a55f-8cfe415c4d70", "address": "fa:16:3e:5b:2a:7a", "network": {"id": "1800eb48-065a-40ff-aa65-26727eaad0cb", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-419697147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9ba7843e6144cd1877b48bc40cd64f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5002fb09-dd", "ovs_interfaceid": "5002fb09-ddc6-4497-a55f-8cfe415c4d70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.650255] env[68217]: DEBUG oslo_vmware.rw_handles [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52aa509f-9d9d-2195-0631-532e9b8d0b6e/disk-0.vmdk. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 881.651723] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-929d1830-c989-41ab-9832-90a5d2f30f87 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.660510] env[68217]: DEBUG oslo_vmware.rw_handles [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52aa509f-9d9d-2195-0631-532e9b8d0b6e/disk-0.vmdk is in state: ready. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 881.660510] env[68217]: ERROR oslo_vmware.rw_handles [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52aa509f-9d9d-2195-0631-532e9b8d0b6e/disk-0.vmdk due to incomplete transfer. 
[ 881.660510] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-ae3ece40-30b9-4d5f-8aab-61d5e6045f1d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.670367] env[68217]: DEBUG oslo_vmware.rw_handles [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52aa509f-9d9d-2195-0631-532e9b8d0b6e/disk-0.vmdk. {{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 881.670522] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Uploaded image 8f1b0ac2-efb1-4feb-8dce-4206b58682bf to the Glance image server {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 881.673612] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 881.675943] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4b406f75-9f2d-4fb7-a962-3cd7a06ee25c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.683743] env[68217]: DEBUG nova.network.neutron [-] [instance: a513976b-4859-4822-8989-c9452db62ee6] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.694561] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 881.694561] env[68217]: value = "task-2961412" [ 881.694561] env[68217]: _type = "Task" [ 881.694561] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.702524] env[68217]: DEBUG nova.compute.manager [req-76f30ef2-0160-4dcb-8c04-c92519f1ce3f req-7bf49550-d411-4803-8af3-c0bd9ec14bcb service nova] [instance: a513976b-4859-4822-8989-c9452db62ee6] Received event network-vif-deleted-6d2d73f5-5a44-440a-b5f5-6b98ba92a165 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 881.704347] env[68217]: INFO nova.compute.manager [req-76f30ef2-0160-4dcb-8c04-c92519f1ce3f req-7bf49550-d411-4803-8af3-c0bd9ec14bcb service nova] [instance: a513976b-4859-4822-8989-c9452db62ee6] Neutron deleted interface 6d2d73f5-5a44-440a-b5f5-6b98ba92a165; detaching it from the instance and deleting it from the info cache [ 881.704347] env[68217]: DEBUG nova.network.neutron [req-76f30ef2-0160-4dcb-8c04-c92519f1ce3f req-7bf49550-d411-4803-8af3-c0bd9ec14bcb service nova] [instance: a513976b-4859-4822-8989-c9452db62ee6] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 881.720632] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961412, 'name': Destroy_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.742228] env[68217]: DEBUG oslo_concurrency.lockutils [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "da1524a7-2756-4429-ada2-b1f493544bd2" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 881.742990] env[68217]: DEBUG oslo_concurrency.lockutils [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "da1524a7-2756-4429-ada2-b1f493544bd2" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 881.743313] env[68217]: INFO nova.compute.manager [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Attaching volume 52fa266f-c8e9-4d71-a291-87ae7c831366 to /dev/sdb [ 881.746808] env[68217]: DEBUG oslo_vmware.rw_handles [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Completed reading data from the image iterator. {{(pid=68217) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 881.747014] env[68217]: DEBUG oslo_vmware.rw_handles [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521b2cac-f7da-f579-1331-05fb6ba4d723/disk-0.vmdk. 
{{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 881.747525] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6c22a6af-3c53-4ed9-80f1-228250f4ee8d tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.622s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.748645] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b68495a-e2a0-4fbb-b4e6-9c670679ac44 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.758135] env[68217]: DEBUG oslo_vmware.rw_handles [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521b2cac-f7da-f579-1331-05fb6ba4d723/disk-0.vmdk is in state: ready. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 881.758460] env[68217]: DEBUG oslo_vmware.rw_handles [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521b2cac-f7da-f579-1331-05fb6ba4d723/disk-0.vmdk. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 881.758548] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-7cfc2b39-ad8e-4e79-b675-7f987619acaa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.786094] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57ef1ec7-e5ca-4984-af9d-b11830c16dce {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.795280] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12c42614-439f-433a-baff-66be4029d087 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.815279] env[68217]: DEBUG nova.virt.block_device [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Updating existing volume attachment record: 79ab4a65-33eb-4ab0-9548-28c345c846fd {{(pid=68217) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 881.848268] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffe67850-d3e3-4442-be2b-c61ca10d7ff6 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "09290e60-7751-408e-9d6d-20e7cb61767b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.840s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 881.966401] env[68217]: DEBUG oslo_vmware.rw_handles [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 
tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/521b2cac-f7da-f579-1331-05fb6ba4d723/disk-0.vmdk. {{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 881.967276] env[68217]: INFO nova.virt.vmwareapi.images [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Downloaded image file data 31c3d1c5-dcbd-447b-935c-0ac48e805003 [ 881.969029] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ea4184-fe13-41e2-bcfa-51d54801de4e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.992373] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-38686713-ba6d-4cd9-94cb-49cc987682b6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.023291] env[68217]: INFO nova.virt.vmwareapi.images [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] The imported VM was unregistered [ 882.027215] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Caching image {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 882.027215] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Creating directory with path [datastore2] devstack-image-cache_base/31c3d1c5-dcbd-447b-935c-0ac48e805003 {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 882.027215] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46515a4d-613e-4824-90c9-9a8d079e1e5a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.029754] env[68217]: DEBUG nova.network.neutron [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Successfully updated port: 4f18a3c9-df30-45b6-9176-4c99f80d5bad {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 882.040053] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Created directory with path [datastore2] devstack-image-cache_base/31c3d1c5-dcbd-447b-935c-0ac48e805003 {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 882.040270] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Moving virtual disk from [datastore2] 
OSTACK_IMG_f5200c02-3c63-49f6-89db-1072b8c0fad9/OSTACK_IMG_f5200c02-3c63-49f6-89db-1072b8c0fad9.vmdk to [datastore2] devstack-image-cache_base/31c3d1c5-dcbd-447b-935c-0ac48e805003/31c3d1c5-dcbd-447b-935c-0ac48e805003.vmdk. {{(pid=68217) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 882.041157] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-0fd647b3-d230-4ff0-99e2-ce6f504e6cc4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.053965] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 882.053965] env[68217]: value = "task-2961415" [ 882.053965] env[68217]: _type = "Task" [ 882.053965] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.066344] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961415, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.113069] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Releasing lock "refresh_cache-ca9ef7ff-b942-4363-a4f8-9163791ec162" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 882.113435] env[68217]: DEBUG nova.compute.manager [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Instance network_info: |[{"id": "5002fb09-ddc6-4497-a55f-8cfe415c4d70", "address": "fa:16:3e:5b:2a:7a", "network": {"id": "1800eb48-065a-40ff-aa65-26727eaad0cb", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-419697147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9ba7843e6144cd1877b48bc40cd64f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5002fb09-dd", "ovs_interfaceid": "5002fb09-ddc6-4497-a55f-8cfe415c4d70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 882.114009] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 
ca9ef7ff-b942-4363-a4f8-9163791ec162] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5b:2a:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1323cb03-8367-485a-962e-131af8eba474', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5002fb09-ddc6-4497-a55f-8cfe415c4d70', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 882.124021] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 882.124799] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 882.125076] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0ff1d80f-11b9-47a4-ad6e-f8dfb434bf4e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.150183] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 882.150183] env[68217]: value = "task-2961416" [ 882.150183] env[68217]: _type = "Task" [ 882.150183] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.166605] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961416, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.208525] env[68217]: INFO nova.compute.manager [-] [instance: a513976b-4859-4822-8989-c9452db62ee6] Took 1.33 seconds to deallocate network for instance. [ 882.220308] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5ad5da5b-6531-4d62-8d85-56ea89905569 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.233942] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961412, 'name': Destroy_Task} progress is 33%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.238458] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2aae0e00-2f1a-4d07-8356-8c49931d0e13 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.293280] env[68217]: DEBUG nova.compute.manager [req-76f30ef2-0160-4dcb-8c04-c92519f1ce3f req-7bf49550-d411-4803-8af3-c0bd9ec14bcb service nova] [instance: a513976b-4859-4822-8989-c9452db62ee6] Detach interface failed, port_id=6d2d73f5-5a44-440a-b5f5-6b98ba92a165, reason: Instance a513976b-4859-4822-8989-c9452db62ee6 could not be found. 
{{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 882.409232] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b0ed5ae-a09d-421d-8ee9-484cc611266e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.424602] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a729c4f3-2246-4b5d-ba0c-889086a791d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.461546] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12e6b6e0-fe82-40da-bf6f-5fbf45f45962 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.472034] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24adc2e1-90ea-4f06-bc45-29a4e9e1e565 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.490978] env[68217]: DEBUG nova.compute.provider_tree [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 882.532673] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "refresh_cache-a7625a02-993b-4577-8d42-f763858a6154" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 882.532836] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "refresh_cache-a7625a02-993b-4577-8d42-f763858a6154" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 882.533122] env[68217]: DEBUG nova.network.neutron [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 882.550137] env[68217]: DEBUG oslo_concurrency.lockutils [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "e550084b-84dd-4ae8-8667-2edb45b49e2b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.550369] env[68217]: DEBUG oslo_concurrency.lockutils [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "e550084b-84dd-4ae8-8667-2edb45b49e2b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.550596] env[68217]: DEBUG oslo_concurrency.lockutils [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "e550084b-84dd-4ae8-8667-2edb45b49e2b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.550901] env[68217]: DEBUG oslo_concurrency.lockutils [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "e550084b-84dd-4ae8-8667-2edb45b49e2b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.551063] env[68217]: DEBUG oslo_concurrency.lockutils [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "e550084b-84dd-4ae8-8667-2edb45b49e2b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 882.554036] env[68217]: INFO nova.compute.manager [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Terminating instance [ 882.567855] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961415, 'name': MoveVirtualDisk_Task} progress is 21%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.662426] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961416, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.709955] env[68217]: DEBUG oslo_concurrency.lockutils [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "2612f6fc-a43f-4011-8a09-51088a49371a" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.710281] env[68217]: DEBUG oslo_concurrency.lockutils [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "2612f6fc-a43f-4011-8a09-51088a49371a" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 882.710541] env[68217]: DEBUG nova.compute.manager [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Going to confirm migration 2 {{(pid=68217) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 882.718848] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961412, 'name': Destroy_Task, 'duration_secs': 0.650748} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.719133] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Destroyed the VM [ 882.719417] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 882.719777] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-f0373929-bcc4-45a8-84ea-e92462ffcf36 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.724246] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 882.728485] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 882.728485] env[68217]: value = "task-2961419" [ 882.728485] env[68217]: _type = "Task" [ 882.728485] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.738190] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961419, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.994652] env[68217]: DEBUG nova.scheduler.client.report [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 883.061812] env[68217]: DEBUG nova.compute.manager [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 883.062121] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 883.063085] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63a5692-e515-435a-a4e3-995835637d88 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.071114] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961415, 'name': MoveVirtualDisk_Task} progress is 40%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.079396] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 883.079710] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4f075a6d-db7d-4303-9715-b9fa6d1830ae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.090312] env[68217]: DEBUG oslo_vmware.api [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 883.090312] env[68217]: value = "task-2961420" [ 883.090312] env[68217]: _type = "Task" [ 883.090312] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.103546] env[68217]: DEBUG oslo_vmware.api [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961420, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.106793] env[68217]: DEBUG nova.network.neutron [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 883.164668] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961416, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.243513] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961419, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.299136] env[68217]: DEBUG oslo_concurrency.lockutils [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "refresh_cache-2612f6fc-a43f-4011-8a09-51088a49371a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.299343] env[68217]: DEBUG oslo_concurrency.lockutils [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "refresh_cache-2612f6fc-a43f-4011-8a09-51088a49371a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.299524] env[68217]: DEBUG nova.network.neutron [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 883.299709] env[68217]: DEBUG nova.objects.instance [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lazy-loading 'info_cache' on Instance uuid 2612f6fc-a43f-4011-8a09-51088a49371a {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 883.401874] env[68217]: DEBUG nova.compute.manager [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Stashing vm_state: active {{(pid=68217) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 883.406117] env[68217]: DEBUG nova.network.neutron [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Updating instance_info_cache with network_info: [{"id": "4f18a3c9-df30-45b6-9176-4c99f80d5bad", "address": "fa:16:3e:d7:05:b1", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f18a3c9-df", "ovs_interfaceid": "4f18a3c9-df30-45b6-9176-4c99f80d5bad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.500602] env[68217]: DEBUG oslo_concurrency.lockutils [None 
req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.197s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.503184] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.511s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.505046] env[68217]: INFO nova.compute.claims [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 883.531673] env[68217]: INFO nova.scheduler.client.report [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Deleted allocations for instance 62628aed-e2f9-478f-bed7-00757fc3c484 [ 883.568615] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961415, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.603349] env[68217]: DEBUG oslo_vmware.api [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961420, 'name': PowerOffVM_Task, 'duration_secs': 0.5126} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.603688] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 883.603886] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 883.604193] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fbc85726-b085-4700-8cee-fa50d20f3b75 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.664252] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961416, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.736990] env[68217]: DEBUG nova.compute.manager [req-9f162edf-013f-41d0-9db3-88b5f1caa796 req-37841ea1-e231-49cf-a9a6-becbb3a81ad7 service nova] [instance: a7625a02-993b-4577-8d42-f763858a6154] Received event network-vif-plugged-4f18a3c9-df30-45b6-9176-4c99f80d5bad {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 883.737239] env[68217]: DEBUG oslo_concurrency.lockutils [req-9f162edf-013f-41d0-9db3-88b5f1caa796 req-37841ea1-e231-49cf-a9a6-becbb3a81ad7 service nova] Acquiring lock "a7625a02-993b-4577-8d42-f763858a6154-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.737554] env[68217]: DEBUG oslo_concurrency.lockutils [req-9f162edf-013f-41d0-9db3-88b5f1caa796 req-37841ea1-e231-49cf-a9a6-becbb3a81ad7 service nova] Lock "a7625a02-993b-4577-8d42-f763858a6154-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 883.737728] env[68217]: DEBUG oslo_concurrency.lockutils [req-9f162edf-013f-41d0-9db3-88b5f1caa796 req-37841ea1-e231-49cf-a9a6-becbb3a81ad7 service nova] Lock "a7625a02-993b-4577-8d42-f763858a6154-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 883.737901] env[68217]: DEBUG nova.compute.manager [req-9f162edf-013f-41d0-9db3-88b5f1caa796 req-37841ea1-e231-49cf-a9a6-becbb3a81ad7 service nova] [instance: a7625a02-993b-4577-8d42-f763858a6154] No waiting events found dispatching network-vif-plugged-4f18a3c9-df30-45b6-9176-4c99f80d5bad {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 883.738125] env[68217]: WARNING nova.compute.manager [req-9f162edf-013f-41d0-9db3-88b5f1caa796 req-37841ea1-e231-49cf-a9a6-becbb3a81ad7 service nova] [instance: a7625a02-993b-4577-8d42-f763858a6154] Received unexpected event network-vif-plugged-4f18a3c9-df30-45b6-9176-4c99f80d5bad for instance with vm_state building and task_state spawning. [ 883.738317] env[68217]: DEBUG nova.compute.manager [req-9f162edf-013f-41d0-9db3-88b5f1caa796 req-37841ea1-e231-49cf-a9a6-becbb3a81ad7 service nova] [instance: a7625a02-993b-4577-8d42-f763858a6154] Received event network-changed-4f18a3c9-df30-45b6-9176-4c99f80d5bad {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 883.738562] env[68217]: DEBUG nova.compute.manager [req-9f162edf-013f-41d0-9db3-88b5f1caa796 req-37841ea1-e231-49cf-a9a6-becbb3a81ad7 service nova] [instance: a7625a02-993b-4577-8d42-f763858a6154] Refreshing instance network info cache due to event network-changed-4f18a3c9-df30-45b6-9176-4c99f80d5bad. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 883.739616] env[68217]: DEBUG oslo_concurrency.lockutils [req-9f162edf-013f-41d0-9db3-88b5f1caa796 req-37841ea1-e231-49cf-a9a6-becbb3a81ad7 service nova] Acquiring lock "refresh_cache-a7625a02-993b-4577-8d42-f763858a6154" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.745332] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961419, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.913788] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "refresh_cache-a7625a02-993b-4577-8d42-f763858a6154" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 883.915127] env[68217]: DEBUG nova.compute.manager [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Instance network_info: |[{"id": "4f18a3c9-df30-45b6-9176-4c99f80d5bad", "address": "fa:16:3e:d7:05:b1", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f18a3c9-df", "ovs_interfaceid": "4f18a3c9-df30-45b6-9176-4c99f80d5bad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 883.916687] env[68217]: DEBUG oslo_concurrency.lockutils [req-9f162edf-013f-41d0-9db3-88b5f1caa796 req-37841ea1-e231-49cf-a9a6-becbb3a81ad7 service nova] Acquired lock "refresh_cache-a7625a02-993b-4577-8d42-f763858a6154" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 883.917416] env[68217]: DEBUG nova.network.neutron [req-9f162edf-013f-41d0-9db3-88b5f1caa796 req-37841ea1-e231-49cf-a9a6-becbb3a81ad7 service nova] [instance: a7625a02-993b-4577-8d42-f763858a6154] Refreshing network info cache for port 4f18a3c9-df30-45b6-9176-4c99f80d5bad {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 883.921239] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 
a7625a02-993b-4577-8d42-f763858a6154] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d7:05:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4f18a3c9-df30-45b6-9176-4c99f80d5bad', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 883.929371] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 883.929981] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7625a02-993b-4577-8d42-f763858a6154] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 883.930241] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e6431bdf-e76c-4a34-a887-7bfb5ee55c1c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.948758] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 883.958190] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 883.958190] env[68217]: value = "task-2961422" [ 883.958190] env[68217]: _type = "Task" [ 883.958190] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.967150] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961422, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.044355] env[68217]: DEBUG oslo_concurrency.lockutils [None req-72175823-f30a-4a7a-9bbb-5ca573920823 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582 tempest-FloatingIPsAssociationNegativeTestJSON-1907692582-project-member] Lock "62628aed-e2f9-478f-bed7-00757fc3c484" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.907s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.074528] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961415, 'name': MoveVirtualDisk_Task} progress is 80%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.076035] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 884.076249] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 884.076439] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Deleting the datastore file [datastore2] e550084b-84dd-4ae8-8667-2edb45b49e2b {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 884.076782] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c0900fa6-1a6c-486b-86a6-71c99d02c9d0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.086978] env[68217]: DEBUG oslo_vmware.api [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for the task: (returnval){ [ 884.086978] env[68217]: value = "task-2961423" [ 884.086978] env[68217]: _type = "Task" [ 884.086978] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.096292] env[68217]: DEBUG oslo_vmware.api [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961423, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.164216] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961416, 'name': CreateVM_Task, 'duration_secs': 1.622146} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.164523] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 884.165466] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.165697] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.166105] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 884.166484] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfd01210-7099-4eb7-a4bd-4f6b73819d0d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.173281] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 884.173281] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524b1d99-3c48-d299-adff-588ee8ef8948" [ 884.173281] env[68217]: _type = "Task" [ 884.173281] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.184484] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524b1d99-3c48-d299-adff-588ee8ef8948, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.241236] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961419, 'name': RemoveSnapshot_Task, 'duration_secs': 1.330368} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.241618] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 884.241963] env[68217]: DEBUG nova.compute.manager [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 884.242829] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dba2d9d-ffd9-46b6-8936-93dcb1bd793d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.472355] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961422, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.575174] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961415, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.597323] env[68217]: DEBUG oslo_vmware.api [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961423, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.694201] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524b1d99-3c48-d299-adff-588ee8ef8948, 'name': SearchDatastore_Task, 'duration_secs': 0.026529} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.694522] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 884.695019] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 884.696777] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.696777] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.696777] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 884.696777] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-23c10bce-bbcf-4db5-b65b-1c5116fe4024 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.712146] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 884.712146] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 884.713149] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76614e92-d7c0-41fc-b5ae-6c735c397ef5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.721613] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 884.721613] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527249b9-8196-218d-6646-800bb7e48598" [ 884.721613] env[68217]: _type = "Task" [ 884.721613] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.732721] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527249b9-8196-218d-6646-800bb7e48598, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.757927] env[68217]: INFO nova.compute.manager [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Shelve offloading [ 884.780906] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquiring lock "b7fe971e-353f-427c-896c-32f9de0d70e7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.781083] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "b7fe971e-353f-427c-896c-32f9de0d70e7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.781216] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquiring lock "b7fe971e-353f-427c-896c-32f9de0d70e7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 884.781550] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "b7fe971e-353f-427c-896c-32f9de0d70e7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 884.782195] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1195a27-294f-4f30-af4c-b1e4e522730f 
tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "b7fe971e-353f-427c-896c-32f9de0d70e7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 884.790784] env[68217]: INFO nova.compute.manager [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Terminating instance [ 884.894508] env[68217]: DEBUG nova.network.neutron [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Updating instance_info_cache with network_info: [{"id": "49bc7718-8633-456d-b4d1-6bcc8493670b", "address": "fa:16:3e:8b:bf:69", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap49bc7718-86", "ovs_interfaceid": "49bc7718-8633-456d-b4d1-6bcc8493670b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.931708] env[68217]: DEBUG nova.network.neutron [req-9f162edf-013f-41d0-9db3-88b5f1caa796 req-37841ea1-e231-49cf-a9a6-becbb3a81ad7 service nova] [instance: a7625a02-993b-4577-8d42-f763858a6154] Updated VIF entry in instance network info cache for port 4f18a3c9-df30-45b6-9176-4c99f80d5bad. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 884.932255] env[68217]: DEBUG nova.network.neutron [req-9f162edf-013f-41d0-9db3-88b5f1caa796 req-37841ea1-e231-49cf-a9a6-becbb3a81ad7 service nova] [instance: a7625a02-993b-4577-8d42-f763858a6154] Updating instance_info_cache with network_info: [{"id": "4f18a3c9-df30-45b6-9176-4c99f80d5bad", "address": "fa:16:3e:d7:05:b1", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4f18a3c9-df", "ovs_interfaceid": "4f18a3c9-df30-45b6-9176-4c99f80d5bad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.968887] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961422, 'name': CreateVM_Task, 'duration_secs': 0.513363} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.969079] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a7625a02-993b-4577-8d42-f763858a6154] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 884.969738] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 884.970163] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 884.970263] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 884.970484] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-029a28d9-0bdb-41c2-ba81-0bacbe2fc0aa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.975755] env[68217]: 
DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 884.975755] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52596366-9ab5-1ecc-ac63-ce3f98c76928" [ 884.975755] env[68217]: _type = "Task" [ 884.975755] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.988846] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52596366-9ab5-1ecc-ac63-ce3f98c76928, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.035321] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4251872b-0b03-4162-87a2-70f1e2416364 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.043557] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d618dcb-e9ed-44bc-8f02-d3bcb4d55a0f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.079172] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6349a26c-e034-4cfb-8425-f2e9c646f126 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.089409] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961415, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.657455} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.090676] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-228e3353-8a8a-4d28-a239-04c1889169c2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.098497] env[68217]: INFO nova.virt.vmwareapi.ds_util [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_f5200c02-3c63-49f6-89db-1072b8c0fad9/OSTACK_IMG_f5200c02-3c63-49f6-89db-1072b8c0fad9.vmdk to [datastore2] devstack-image-cache_base/31c3d1c5-dcbd-447b-935c-0ac48e805003/31c3d1c5-dcbd-447b-935c-0ac48e805003.vmdk. 
[ 885.098606] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Cleaning up location [datastore2] OSTACK_IMG_f5200c02-3c63-49f6-89db-1072b8c0fad9 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 885.098703] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_f5200c02-3c63-49f6-89db-1072b8c0fad9 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 885.099067] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a4082a4e-15bb-48dd-90aa-b95b0e969567 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.112744] env[68217]: DEBUG nova.compute.provider_tree [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 885.120605] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 885.120605] env[68217]: value = "task-2961425" [ 885.120605] env[68217]: _type = "Task" [ 885.120605] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.120605] env[68217]: DEBUG oslo_vmware.api [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Task: {'id': task-2961423, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.966285} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.120605] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 885.120605] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 885.120605] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 885.121068] env[68217]: INFO nova.compute.manager [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Took 2.06 seconds to destroy the instance on the hypervisor. [ 885.121068] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 885.123115] env[68217]: DEBUG nova.compute.manager [-] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 885.123220] env[68217]: DEBUG nova.network.neutron [-] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 885.131785] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961425, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.239790] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527249b9-8196-218d-6646-800bb7e48598, 'name': SearchDatastore_Task, 'duration_secs': 0.020922} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.240642] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6fc6ea3-49d5-43ad-81c8-4ba7c1b05d46 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.247465] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 885.247465] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5291bc0f-2b8f-80a9-9bb1-da3e65829576" [ 885.247465] env[68217]: _type = "Task" [ 885.247465] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.257236] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5291bc0f-2b8f-80a9-9bb1-da3e65829576, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.262258] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 885.262507] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cdea78b2-664c-4f5b-aa0f-fca5d23c1e24 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.269855] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 885.269855] env[68217]: value = "task-2961426" [ 885.269855] env[68217]: _type = "Task" [ 885.269855] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.279161] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961426, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.304067] env[68217]: DEBUG nova.compute.manager [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 885.304301] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 885.305166] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d49fa3d-45a4-4160-a8f4-b22d87b84612 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.314273] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 885.314273] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69e4f60c-d4ae-45a3-b1c9-49401d25df0e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.322514] env[68217]: DEBUG oslo_vmware.api [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 885.322514] env[68217]: value = "task-2961427" [ 885.322514] env[68217]: _type = "Task" [ 885.322514] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.334314] env[68217]: DEBUG oslo_vmware.api [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961427, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.397854] env[68217]: DEBUG oslo_concurrency.lockutils [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "refresh_cache-2612f6fc-a43f-4011-8a09-51088a49371a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.398149] env[68217]: DEBUG nova.objects.instance [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lazy-loading 'migration_context' on Instance uuid 2612f6fc-a43f-4011-8a09-51088a49371a {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 885.440152] env[68217]: DEBUG oslo_concurrency.lockutils [req-9f162edf-013f-41d0-9db3-88b5f1caa796 req-37841ea1-e231-49cf-a9a6-becbb3a81ad7 service nova] Releasing lock "refresh_cache-a7625a02-993b-4577-8d42-f763858a6154" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.489527] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52596366-9ab5-1ecc-ac63-ce3f98c76928, 'name': SearchDatastore_Task, 'duration_secs': 0.053113} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.490239] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.490239] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 885.490425] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.620492] env[68217]: DEBUG nova.scheduler.client.report [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:958}} [ 885.637232] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961425, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.139825} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.638959] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 885.643016] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Releasing lock "[datastore2] devstack-image-cache_base/31c3d1c5-dcbd-447b-935c-0ac48e805003/31c3d1c5-dcbd-447b-935c-0ac48e805003.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.643332] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/31c3d1c5-dcbd-447b-935c-0ac48e805003/31c3d1c5-dcbd-447b-935c-0ac48e805003.vmdk to [datastore2] 58c15727-79ae-404f-a054-d71e3be498cc/58c15727-79ae-404f-a054-d71e3be498cc.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 885.643946] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2cf511a4-dcd7-4f9d-be09-b6a6fbd29b69 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.655066] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 885.655066] env[68217]: value = "task-2961428" [ 885.655066] env[68217]: _type = "Task" [ 885.655066] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.665662] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961428, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.761858] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5291bc0f-2b8f-80a9-9bb1-da3e65829576, 'name': SearchDatastore_Task, 'duration_secs': 0.01627} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.762181] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 885.762448] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] ca9ef7ff-b942-4363-a4f8-9163791ec162/ca9ef7ff-b942-4363-a4f8-9163791ec162.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 885.762804] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.763064] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 885.763364] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d49843a-652b-405d-84f5-57b7c5848b46 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.765511] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9dd05bab-a2a8-4d49-bae9-8acbd1a8125a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.769954] env[68217]: DEBUG nova.compute.manager [req-fb611068-7052-4ad1-b296-10c5d130849e req-e2d4816e-1821-4bd9-b4ba-07b656ac6534 service nova] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Received event network-vif-deleted-0abd5109-c94f-4eba-b6b2-ca8b28794157 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 885.770160] env[68217]: INFO nova.compute.manager [req-fb611068-7052-4ad1-b296-10c5d130849e req-e2d4816e-1821-4bd9-b4ba-07b656ac6534 service nova] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Neutron deleted interface 0abd5109-c94f-4eba-b6b2-ca8b28794157; detaching it from the instance and deleting it from the info cache [ 885.770396] env[68217]: DEBUG nova.network.neutron [req-fb611068-7052-4ad1-b296-10c5d130849e req-e2d4816e-1821-4bd9-b4ba-07b656ac6534 service nova] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.777168] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 
tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 885.777168] env[68217]: value = "task-2961429" [ 885.777168] env[68217]: _type = "Task" [ 885.777168] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.781624] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 885.781813] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 885.788079] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d0b205ec-c906-4207-b2b5-32a76f6d5e17 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.791593] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] VM already powered off {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 885.791805] env[68217]: DEBUG nova.compute.manager [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 885.793112] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bedb8b8-b29b-407b-97a2-f69b6203bc5b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.800318] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 885.800318] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5272dea3-bd62-9501-950a-f0e37d7ea70f" [ 885.800318] env[68217]: _type = "Task" [ 885.800318] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.800544] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961429, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.806108] env[68217]: DEBUG oslo_concurrency.lockutils [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "refresh_cache-03d61c68-1b37-4172-b276-67a73a0dc228" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.806200] env[68217]: DEBUG oslo_concurrency.lockutils [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "refresh_cache-03d61c68-1b37-4172-b276-67a73a0dc228" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 885.806336] env[68217]: DEBUG nova.network.neutron [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 885.813448] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5272dea3-bd62-9501-950a-f0e37d7ea70f, 'name': SearchDatastore_Task, 'duration_secs': 0.010253} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.814521] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d5879ce-0902-45f3-ab55-3754adacd81b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.820453] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 885.820453] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52410903-4613-afa8-d82f-8af69e5d5a81" [ 885.820453] env[68217]: _type = "Task" [ 885.820453] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.834957] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52410903-4613-afa8-d82f-8af69e5d5a81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.839038] env[68217]: DEBUG oslo_vmware.api [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961427, 'name': PowerOffVM_Task, 'duration_secs': 0.385349} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.839038] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 885.839038] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 885.839038] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-382cd584-5a88-49ed-b012-67571085303a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.901946] env[68217]: DEBUG nova.objects.base [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Object Instance<2612f6fc-a43f-4011-8a09-51088a49371a> lazy-loaded attributes: info_cache,migration_context {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 885.903189] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6621edfa-429a-4ca7-8555-0f4b7d36cca2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.931512] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-caa7c3fe-95ad-4a8f-858c-17d51fe2bb86 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.934729] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 885.935570] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 885.935570] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Deleting the datastore file [datastore1] b7fe971e-353f-427c-896c-32f9de0d70e7 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 885.938057] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19f97162-8be7-431a-8fea-526fd8c8ab89 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.945507] env[68217]: DEBUG oslo_vmware.api [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 
tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 885.945507] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5272efe4-7feb-22d5-ee16-96980333efe4" [ 885.945507] env[68217]: _type = "Task" [ 885.945507] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.949902] env[68217]: DEBUG oslo_vmware.api [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 885.949902] env[68217]: value = "task-2961431" [ 885.949902] env[68217]: _type = "Task" [ 885.949902] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.958079] env[68217]: DEBUG oslo_vmware.api [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5272efe4-7feb-22d5-ee16-96980333efe4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.967260] env[68217]: DEBUG oslo_vmware.api [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961431, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.131372] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.628s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 886.131917] env[68217]: DEBUG nova.compute.manager [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 886.135788] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.411s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 886.136153] env[68217]: DEBUG nova.objects.instance [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lazy-loading 'resources' on Instance uuid a513976b-4859-4822-8989-c9452db62ee6 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 886.144530] env[68217]: DEBUG nova.network.neutron [-] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.170225] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961428, 'name': CopyVirtualDisk_Task} progress is 21%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.274016] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-365d1e99-a706-442a-aae5-cfd898ab35a9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.289577] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a82ed2b-e78a-4254-b931-b0a7d159f204 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.305673] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961429, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.345809] env[68217]: DEBUG nova.compute.manager [req-fb611068-7052-4ad1-b296-10c5d130849e req-e2d4816e-1821-4bd9-b4ba-07b656ac6534 service nova] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Detach interface failed, port_id=0abd5109-c94f-4eba-b6b2-ca8b28794157, reason: Instance e550084b-84dd-4ae8-8667-2edb45b49e2b could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 886.357178] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52410903-4613-afa8-d82f-8af69e5d5a81, 'name': SearchDatastore_Task, 'duration_secs': 0.014493} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.357464] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 886.357799] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] a7625a02-993b-4577-8d42-f763858a6154/a7625a02-993b-4577-8d42-f763858a6154.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 886.358101] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5c3391cf-64a3-412a-b785-c35931fe8a1b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.372754] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 886.372754] env[68217]: value = "task-2961432" [ 886.372754] env[68217]: _type = "Task" [ 886.372754] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.385988] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961432, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.455975] env[68217]: DEBUG oslo_vmware.api [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5272efe4-7feb-22d5-ee16-96980333efe4, 'name': SearchDatastore_Task, 'duration_secs': 0.031299} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.461023] env[68217]: DEBUG oslo_concurrency.lockutils [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 886.466600] env[68217]: DEBUG oslo_vmware.api [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961431, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.640903] env[68217]: DEBUG nova.compute.utils [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 886.646829] env[68217]: DEBUG nova.compute.manager [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 886.647201] env[68217]: DEBUG nova.network.neutron [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 886.649373] env[68217]: INFO nova.compute.manager [-] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Took 1.53 seconds to deallocate network for instance. [ 886.685265] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961428, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.706673] env[68217]: DEBUG nova.network.neutron [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Updating instance_info_cache with network_info: [{"id": "f10ba1e7-ec20-4ece-a5e6-c0e47e42e986", "address": "fa:16:3e:bd:30:8a", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf10ba1e7-ec", "ovs_interfaceid": "f10ba1e7-ec20-4ece-a5e6-c0e47e42e986", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.727813] env[68217]: DEBUG nova.policy [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36276be4c67c4abfa0941293d4cc800b', 'user_domain_id': 'default', 
'system_scope': None, 'domain_id': None, 'project_id': 'ebfeb38b81794c558c1164cecd7fa221', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 886.794349] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961429, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.676964} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.794814] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] ca9ef7ff-b942-4363-a4f8-9163791ec162/ca9ef7ff-b942-4363-a4f8-9163791ec162.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 886.795090] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 886.795440] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ac137c99-461c-4499-937b-373a00342792 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.811252] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 886.811252] env[68217]: value = "task-2961433" [ 886.811252] env[68217]: _type = "Task" [ 886.811252] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.847979] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961433, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.885360] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Volume attach. 
Driver type: vmdk {{(pid=68217) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 886.885673] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594311', 'volume_id': '52fa266f-c8e9-4d71-a291-87ae7c831366', 'name': 'volume-52fa266f-c8e9-4d71-a291-87ae7c831366', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'da1524a7-2756-4429-ada2-b1f493544bd2', 'attached_at': '', 'detached_at': '', 'volume_id': '52fa266f-c8e9-4d71-a291-87ae7c831366', 'serial': '52fa266f-c8e9-4d71-a291-87ae7c831366'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 886.886971] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2161cdc2-340f-4826-ba4e-41fcad213866 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.905161] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961432, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.928514] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee66eed-7469-4a8c-b85b-2d9f68dfec6d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.961457] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Reconfiguring VM instance instance-00000024 to attach disk [datastore1] volume-52fa266f-c8e9-4d71-a291-87ae7c831366/volume-52fa266f-c8e9-4d71-a291-87ae7c831366.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 886.964803] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aba2d22d-c488-4b42-bec0-c5102009b926 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.989622] env[68217]: DEBUG oslo_vmware.api [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961431, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.583589} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.994642] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 886.994905] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 886.995102] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 886.995998] env[68217]: INFO nova.compute.manager [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Took 1.69 seconds to destroy the instance on the hypervisor. [ 886.995998] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 886.995998] env[68217]: DEBUG oslo_vmware.api [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 886.995998] env[68217]: value = "task-2961434" [ 886.995998] env[68217]: _type = "Task" [ 886.995998] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.996375] env[68217]: DEBUG nova.compute.manager [-] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 886.996517] env[68217]: DEBUG nova.network.neutron [-] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 887.014121] env[68217]: DEBUG oslo_vmware.api [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961434, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.146223] env[68217]: DEBUG nova.compute.manager [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Start building block device mappings for instance.
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 887.172749] env[68217]: DEBUG oslo_concurrency.lockutils [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.179043] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961428, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.214193] env[68217]: DEBUG oslo_concurrency.lockutils [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "refresh_cache-03d61c68-1b37-4172-b276-67a73a0dc228" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 887.252152] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e7c1ddd-8d0e-4292-85cc-32f3aafabb14 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.265093] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac8aa4a-1171-4ad9-8278-b3c611aa1a15 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.308218] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad4a0385-5ab7-4f40-bca0-630f622655e7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.322316] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-338452d4-eb92-4be4-bad6-28543eba59ef {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.327113] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961433, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.233182} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.327749] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 887.329031] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3108f3d4-9565-40b8-aaa5-f493501110db {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.340499] env[68217]: DEBUG nova.compute.provider_tree [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.366952] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] ca9ef7ff-b942-4363-a4f8-9163791ec162/ca9ef7ff-b942-4363-a4f8-9163791ec162.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.370325] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f2a7a94f-6260-46aa-b711-a467ddcb1c86 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.401036] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961432, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594146} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.401036] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] a7625a02-993b-4577-8d42-f763858a6154/a7625a02-993b-4577-8d42-f763858a6154.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 887.401036] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 887.401316] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 887.401316] env[68217]: value = "task-2961435" [ 887.401316] env[68217]: _type = "Task" [ 887.401316] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.401316] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8df470d0-be88-4bbb-a910-8e59088dfe96 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.414366] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961435, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.415774] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 887.415774] env[68217]: value = "task-2961436" [ 887.415774] env[68217]: _type = "Task" [ 887.415774] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.431798] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961436, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.513352] env[68217]: DEBUG oslo_vmware.api [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961434, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.529500] env[68217]: DEBUG nova.network.neutron [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Successfully created port: a2616d12-5ede-48a3-8191-248418505394 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 887.638708] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 887.638708] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea0974f0-a9d5-4dac-9998-530cde5c22b6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.657256] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 887.658219] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c0d20e46-5aa2-4d77-8fd9-1dcd51689656 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.674921] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961428, 'name': CopyVirtualDisk_Task} progress is 83%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.757534] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 887.757534] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 887.757534] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleting the datastore file [datastore1] 03d61c68-1b37-4172-b276-67a73a0dc228 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 887.757534] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ea954543-cf7f-4506-b716-c280ddbd2724 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.774851] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 887.774851] env[68217]: value = "task-2961438" [ 887.774851] env[68217]: _type = "Task" [ 887.774851] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.788575] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961438, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.793559] env[68217]: DEBUG nova.compute.manager [req-46bd5eec-3321-460b-bb3a-0f3b8ea2c0b0 req-b56f8e17-3d2c-4cb0-972e-b7b864efd8a2 service nova] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Received event network-vif-unplugged-f10ba1e7-ec20-4ece-a5e6-c0e47e42e986 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 887.793559] env[68217]: DEBUG oslo_concurrency.lockutils [req-46bd5eec-3321-460b-bb3a-0f3b8ea2c0b0 req-b56f8e17-3d2c-4cb0-972e-b7b864efd8a2 service nova] Acquiring lock "03d61c68-1b37-4172-b276-67a73a0dc228-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 887.793559] env[68217]: DEBUG oslo_concurrency.lockutils [req-46bd5eec-3321-460b-bb3a-0f3b8ea2c0b0 req-b56f8e17-3d2c-4cb0-972e-b7b864efd8a2 service nova] Lock "03d61c68-1b37-4172-b276-67a73a0dc228-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 887.793822] env[68217]: DEBUG oslo_concurrency.lockutils [req-46bd5eec-3321-460b-bb3a-0f3b8ea2c0b0 req-b56f8e17-3d2c-4cb0-972e-b7b864efd8a2 service nova] Lock "03d61c68-1b37-4172-b276-67a73a0dc228-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 887.793861] env[68217]: DEBUG nova.compute.manager [req-46bd5eec-3321-460b-bb3a-0f3b8ea2c0b0 req-b56f8e17-3d2c-4cb0-972e-b7b864efd8a2 service nova] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] No waiting events found dispatching network-vif-unplugged-f10ba1e7-ec20-4ece-a5e6-c0e47e42e986 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 887.794560] env[68217]: WARNING nova.compute.manager [req-46bd5eec-3321-460b-bb3a-0f3b8ea2c0b0 req-b56f8e17-3d2c-4cb0-972e-b7b864efd8a2 service nova] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Received unexpected event network-vif-unplugged-f10ba1e7-ec20-4ece-a5e6-c0e47e42e986 for instance with vm_state shelved and task_state shelving_offloading. 
[ 887.831118] env[68217]: DEBUG nova.compute.manager [req-4c124598-a52a-49e5-b123-d75cec94a2b3 req-b12cf2ba-0b11-4490-801e-dc7dded2856d service nova] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Received event network-vif-deleted-cdcfcb29-1a21-4c5f-a612-ab70b34a44b1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 887.831118] env[68217]: INFO nova.compute.manager [req-4c124598-a52a-49e5-b123-d75cec94a2b3 req-b12cf2ba-0b11-4490-801e-dc7dded2856d service nova] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Neutron deleted interface cdcfcb29-1a21-4c5f-a612-ab70b34a44b1; detaching it from the instance and deleting it from the info cache [ 887.831118] env[68217]: DEBUG nova.network.neutron [req-4c124598-a52a-49e5-b123-d75cec94a2b3 req-b12cf2ba-0b11-4490-801e-dc7dded2856d service nova] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 887.844631] env[68217]: DEBUG nova.scheduler.client.report [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 887.922827] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961435, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.934527] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961436, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083486} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.934809] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 887.935704] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07b0784f-6440-4a5b-b772-9bc3689d028a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.963682] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Reconfiguring VM instance instance-0000004d to attach disk [datastore1] a7625a02-993b-4577-8d42-f763858a6154/a7625a02-993b-4577-8d42-f763858a6154.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 887.964906] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4efb4374-20c0-427a-8bd3-da318f699faf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.990940] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 887.990940] env[68217]: value = "task-2961439" [ 887.990940] env[68217]: _type = "Task" [ 887.990940] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.003849] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961439, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.020158] env[68217]: DEBUG oslo_vmware.api [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961434, 'name': ReconfigVM_Task, 'duration_secs': 0.753166} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.020804] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Reconfigured VM instance instance-00000024 to attach disk [datastore1] volume-52fa266f-c8e9-4d71-a291-87ae7c831366/volume-52fa266f-c8e9-4d71-a291-87ae7c831366.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.027030] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87020a3b-968f-40e1-8d9d-f3cd791d8c4f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.045290] env[68217]: DEBUG oslo_vmware.api [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 888.045290] env[68217]: value = "task-2961440" [ 888.045290] env[68217]: _type = "Task" [ 888.045290] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.057185] env[68217]: DEBUG oslo_vmware.api [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961440, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.088800] env[68217]: DEBUG nova.network.neutron [-] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 888.159938] env[68217]: DEBUG nova.compute.manager [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 888.173945] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961428, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.433159} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.174340] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/31c3d1c5-dcbd-447b-935c-0ac48e805003/31c3d1c5-dcbd-447b-935c-0ac48e805003.vmdk to [datastore2] 58c15727-79ae-404f-a054-d71e3be498cc/58c15727-79ae-404f-a054-d71e3be498cc.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 888.175207] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfdc1cc5-0253-422e-805a-dea1167f1b48 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.199057] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Reconfiguring VM instance instance-00000020 to attach disk [datastore2] 58c15727-79ae-404f-a054-d71e3be498cc/58c15727-79ae-404f-a054-d71e3be498cc.vmdk or device None with type streamOptimized {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 888.201417] env[68217]: DEBUG nova.virt.hardware [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='406811dac611ed63f06fd0b4505553f0',container_format='bare',created_at=2025-03-12T08:19:52Z,direct_url=,disk_format='vmdk',id=9f4edacb-625d-403d-beb9-916f1ffd1cd7,min_disk=1,min_ram=0,name='tempest-test-snap-1383004512',owner='ebfeb38b81794c558c1164cecd7fa221',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2025-03-12T08:20:07Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 888.201694] env[68217]: DEBUG nova.virt.hardware [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 888.201962] env[68217]: DEBUG nova.virt.hardware [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 888.202293] env[68217]: DEBUG nova.virt.hardware [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 888.202493] env[68217]: DEBUG nova.virt.hardware [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Image pref 
0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 888.202683] env[68217]: DEBUG nova.virt.hardware [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 888.203011] env[68217]: DEBUG nova.virt.hardware [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 888.203138] env[68217]: DEBUG nova.virt.hardware [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 888.203327] env[68217]: DEBUG nova.virt.hardware [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 888.203491] env[68217]: DEBUG nova.virt.hardware [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 888.203862] env[68217]: DEBUG nova.virt.hardware [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 888.204220] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f7c54af6-28b8-4c6d-bc40-f5b847bf932f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.218795] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e99cf3-4424-4186-82de-a580d5f13a84 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.229214] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec213841-14de-44a5-bb11-3cdf849bc829 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.233013] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 888.233013] env[68217]: value = "task-2961441" [ 888.233013] env[68217]: _type = "Task" [ 888.233013] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.253085] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961441, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.288211] env[68217]: DEBUG oslo_vmware.api [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961438, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.261309} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.288556] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 888.288822] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 888.289077] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 888.317317] env[68217]: INFO nova.scheduler.client.report [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleted allocations for instance 03d61c68-1b37-4172-b276-67a73a0dc228 [ 888.332475] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b366eb36-db42-4bdb-b6e1-47da0bcde8cc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.346027] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d28f8045-00c2-4a3d-b258-9ad5995fffde {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.356626] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.221s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.358833] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 4.411s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 888.395022] env[68217]: DEBUG nova.compute.manager [req-4c124598-a52a-49e5-b123-d75cec94a2b3 req-b12cf2ba-0b11-4490-801e-dc7dded2856d service nova] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Detach interface failed, port_id=cdcfcb29-1a21-4c5f-a612-ab70b34a44b1, reason: Instance b7fe971e-353f-427c-896c-32f9de0d70e7 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 888.396444] env[68217]: INFO nova.scheduler.client.report [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Deleted allocations for instance a513976b-4859-4822-8989-c9452db62ee6 [ 888.414077] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961435, 'name': ReconfigVM_Task, 'duration_secs': 0.652737} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.414913] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Reconfigured VM instance instance-0000004b to attach disk [datastore1] ca9ef7ff-b942-4363-a4f8-9163791ec162/ca9ef7ff-b942-4363-a4f8-9163791ec162.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.415059] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7ab3b45e-d7b0-400c-a141-d20227837e0b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.423889] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 888.423889] env[68217]: value = "task-2961442" [ 888.423889] env[68217]: _type = "Task" [ 888.423889] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.433388] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961442, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.502171] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961439, 'name': ReconfigVM_Task, 'duration_secs': 0.363019} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.502494] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Reconfigured VM instance instance-0000004d to attach disk [datastore1] a7625a02-993b-4577-8d42-f763858a6154/a7625a02-993b-4577-8d42-f763858a6154.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 888.503185] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2e8a9b02-0363-4516-a2a0-1cd80d9574eb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.510916] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 888.510916] env[68217]: value = "task-2961443" [ 888.510916] env[68217]: _type = "Task" [ 888.510916] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.521943] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961443, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.555679] env[68217]: DEBUG oslo_vmware.api [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961440, 'name': ReconfigVM_Task, 'duration_secs': 0.155082} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.555995] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594311', 'volume_id': '52fa266f-c8e9-4d71-a291-87ae7c831366', 'name': 'volume-52fa266f-c8e9-4d71-a291-87ae7c831366', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'da1524a7-2756-4429-ada2-b1f493544bd2', 'attached_at': '', 'detached_at': '', 'volume_id': '52fa266f-c8e9-4d71-a291-87ae7c831366', 'serial': '52fa266f-c8e9-4d71-a291-87ae7c831366'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 888.592198] env[68217]: INFO nova.compute.manager [-] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Took 1.60 seconds to deallocate network for instance. [ 888.744425] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961441, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.822508] env[68217]: DEBUG oslo_concurrency.lockutils [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 888.866537] env[68217]: INFO nova.compute.claims [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 888.908311] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3b9a16ad-3297-4a64-988c-909ee3ed70d8 tempest-ImagesOneServerNegativeTestJSON-1551342174 tempest-ImagesOneServerNegativeTestJSON-1551342174-project-member] Lock "a513976b-4859-4822-8989-c9452db62ee6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.752s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 888.937768] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961442, 'name': Rename_Task, 'duration_secs': 0.387551} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.938127] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 888.938357] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-785ee451-07dc-43e0-b700-d6526cc6e86f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.947402] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 888.947402] env[68217]: value = "task-2961444" [ 888.947402] env[68217]: _type = "Task" [ 888.947402] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.956036] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961444, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.021919] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961443, 'name': Rename_Task, 'duration_secs': 0.462381} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.022246] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 889.022554] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd93ca70-7f28-4d6e-941c-38a0ffaa20cf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.032264] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 889.032264] env[68217]: value = "task-2961445" [ 889.032264] env[68217]: _type = "Task" [ 889.032264] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.042518] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961445, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.100067] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.251461] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961441, 'name': ReconfigVM_Task, 'duration_secs': 0.854006} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.251790] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Reconfigured VM instance instance-00000020 to attach disk [datastore2] 58c15727-79ae-404f-a054-d71e3be498cc/58c15727-79ae-404f-a054-d71e3be498cc.vmdk or device None with type streamOptimized {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 889.253633] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6c2cacb4-1a8e-436a-b4a6-62b1f8adf7d7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.264369] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 889.264369] env[68217]: value = "task-2961446" [ 889.264369] env[68217]: _type = "Task" [ 889.264369] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.276365] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961446, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.374013] env[68217]: INFO nova.compute.resource_tracker [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Updating resource usage from migration 9b4da72d-7edd-4b1b-af49-bccd923d81bd [ 889.468705] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961444, 'name': PowerOnVM_Task} progress is 78%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.523199] env[68217]: DEBUG nova.network.neutron [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Successfully updated port: a2616d12-5ede-48a3-8191-248418505394 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 889.548113] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961445, 'name': PowerOnVM_Task} progress is 87%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.610059] env[68217]: DEBUG nova.objects.instance [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lazy-loading 'flavor' on Instance uuid da1524a7-2756-4429-ada2-b1f493544bd2 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 889.777776] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961446, 'name': Rename_Task, 'duration_secs': 0.305236} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.778139] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 889.778410] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f17f4952-8f92-4eca-be17-f91e2258deb6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.785951] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 889.785951] env[68217]: value = "task-2961447" [ 889.785951] env[68217]: _type = "Task" [ 889.785951] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.802283] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961447, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.852788] env[68217]: DEBUG nova.compute.manager [req-633d4e89-9a25-4905-99cf-7307bc5f0ff8 req-e6a741ff-672b-41ef-84a3-e4a15f197a85 service nova] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Received event network-changed-f10ba1e7-ec20-4ece-a5e6-c0e47e42e986 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 889.853056] env[68217]: DEBUG nova.compute.manager [req-633d4e89-9a25-4905-99cf-7307bc5f0ff8 req-e6a741ff-672b-41ef-84a3-e4a15f197a85 service nova] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Refreshing instance network info cache due to event network-changed-f10ba1e7-ec20-4ece-a5e6-c0e47e42e986. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 889.853309] env[68217]: DEBUG oslo_concurrency.lockutils [req-633d4e89-9a25-4905-99cf-7307bc5f0ff8 req-e6a741ff-672b-41ef-84a3-e4a15f197a85 service nova] Acquiring lock "refresh_cache-03d61c68-1b37-4172-b276-67a73a0dc228" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.853406] env[68217]: DEBUG oslo_concurrency.lockutils [req-633d4e89-9a25-4905-99cf-7307bc5f0ff8 req-e6a741ff-672b-41ef-84a3-e4a15f197a85 service nova] Acquired lock "refresh_cache-03d61c68-1b37-4172-b276-67a73a0dc228" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.853596] env[68217]: DEBUG nova.network.neutron [req-633d4e89-9a25-4905-99cf-7307bc5f0ff8 req-e6a741ff-672b-41ef-84a3-e4a15f197a85 service nova] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Refreshing network info cache for port f10ba1e7-ec20-4ece-a5e6-c0e47e42e986 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 889.869672] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c677fbe1-bb9f-4f04-bacb-ed774ff89d8f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.876520] env[68217]: DEBUG nova.compute.manager [req-098ae681-ff87-419a-b6d1-0c09eda4a3ac req-c00471aa-7b60-48eb-ba2e-0863411b2c77 service nova] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Received event network-vif-plugged-a2616d12-5ede-48a3-8191-248418505394 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 889.876520] env[68217]: DEBUG oslo_concurrency.lockutils [req-098ae681-ff87-419a-b6d1-0c09eda4a3ac req-c00471aa-7b60-48eb-ba2e-0863411b2c77 service nova] Acquiring lock "650ebd16-da81-475e-a82a-7fa5fb2880bc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 889.876700] env[68217]: DEBUG oslo_concurrency.lockutils [req-098ae681-ff87-419a-b6d1-0c09eda4a3ac req-c00471aa-7b60-48eb-ba2e-0863411b2c77 service nova] Lock "650ebd16-da81-475e-a82a-7fa5fb2880bc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 889.877113] env[68217]: DEBUG oslo_concurrency.lockutils [req-098ae681-ff87-419a-b6d1-0c09eda4a3ac req-c00471aa-7b60-48eb-ba2e-0863411b2c77 service nova] Lock "650ebd16-da81-475e-a82a-7fa5fb2880bc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 889.877113] env[68217]: DEBUG nova.compute.manager [req-098ae681-ff87-419a-b6d1-0c09eda4a3ac req-c00471aa-7b60-48eb-ba2e-0863411b2c77 service nova] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] No waiting events found dispatching network-vif-plugged-a2616d12-5ede-48a3-8191-248418505394 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 889.877290] env[68217]: WARNING nova.compute.manager [req-098ae681-ff87-419a-b6d1-0c09eda4a3ac req-c00471aa-7b60-48eb-ba2e-0863411b2c77 service nova] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Received unexpected event 
network-vif-plugged-a2616d12-5ede-48a3-8191-248418505394 for instance with vm_state building and task_state spawning. [ 889.877546] env[68217]: DEBUG nova.compute.manager [req-098ae681-ff87-419a-b6d1-0c09eda4a3ac req-c00471aa-7b60-48eb-ba2e-0863411b2c77 service nova] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Received event network-changed-a2616d12-5ede-48a3-8191-248418505394 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 889.877842] env[68217]: DEBUG nova.compute.manager [req-098ae681-ff87-419a-b6d1-0c09eda4a3ac req-c00471aa-7b60-48eb-ba2e-0863411b2c77 service nova] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Refreshing instance network info cache due to event network-changed-a2616d12-5ede-48a3-8191-248418505394. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 889.878165] env[68217]: DEBUG oslo_concurrency.lockutils [req-098ae681-ff87-419a-b6d1-0c09eda4a3ac req-c00471aa-7b60-48eb-ba2e-0863411b2c77 service nova] Acquiring lock "refresh_cache-650ebd16-da81-475e-a82a-7fa5fb2880bc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.878414] env[68217]: DEBUG oslo_concurrency.lockutils [req-098ae681-ff87-419a-b6d1-0c09eda4a3ac req-c00471aa-7b60-48eb-ba2e-0863411b2c77 service nova] Acquired lock "refresh_cache-650ebd16-da81-475e-a82a-7fa5fb2880bc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 889.878675] env[68217]: DEBUG nova.network.neutron [req-098ae681-ff87-419a-b6d1-0c09eda4a3ac req-c00471aa-7b60-48eb-ba2e-0863411b2c77 service nova] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Refreshing network info cache for port a2616d12-5ede-48a3-8191-248418505394 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 889.886759] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57142c68-2a27-4b77-9631-74013cdd55f6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.919583] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76f4e4bf-7ebc-47c7-acc7-882cfbc037fb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.928121] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75226ccb-0423-4eda-9aa4-a52ef633a7c7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.944594] env[68217]: DEBUG nova.compute.provider_tree [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 889.962392] env[68217]: DEBUG oslo_vmware.api [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961444, 'name': PowerOnVM_Task, 'duration_secs': 0.892168} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.962599] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 889.962788] env[68217]: INFO nova.compute.manager [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Took 11.46 seconds to spawn the instance on the hypervisor. [ 889.962988] env[68217]: DEBUG nova.compute.manager [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 889.963765] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f0a185a-9cf7-4709-a12c-7828db85c14f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.026858] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "refresh_cache-650ebd16-da81-475e-a82a-7fa5fb2880bc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.043938] env[68217]: DEBUG oslo_vmware.api [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961445, 'name': PowerOnVM_Task, 'duration_secs': 0.716376} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.044248] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 890.044452] env[68217]: INFO nova.compute.manager [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Took 8.87 seconds to spawn the instance on the hypervisor. 
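Illustrative sketch (not part of the captured run): the entries above and below trace the vmwareapi spawn flow driving vCenter through oslo.vmware, where a task such as ReconfigVM_Task, Rename_Task or PowerOnVM_Task is invoked and then polled until completion, which is what produces the recurring "progress is N%" and "completed successfully" lines. Below is a minimal sketch of that invoke-and-poll pattern using the public oslo.vmware session API; the host, credentials and vm_ref are placeholders, not values from this deployment.

    # Sketch only: the invoke/poll pattern visible in these log entries.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc.example.test',          # placeholder vCenter host
        'user', 'secret',           # placeholder credentials
        api_retry_count=10,
        task_poll_interval=0.5)     # interval behind the periodic "progress is N%" polling

    vm_ref = None  # placeholder: a VirtualMachine managed-object reference obtained elsewhere

    # invoke_api() issues the SOAP call and returns a task reference;
    # wait_for_task() polls that task until it succeeds or raises on failure.
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
    task_info = session.wait_for_task(task)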
[ 890.044628] env[68217]: DEBUG nova.compute.manager [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 890.045481] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14ff518b-139d-4477-b5a6-4dca58049c66 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.059545] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5c83039-bb5e-493d-ad83-2851f0d457cc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "03d61c68-1b37-4172-b276-67a73a0dc228" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.113256] env[68217]: DEBUG oslo_concurrency.lockutils [None req-50cbadde-a868-40cb-a1b9-675b13f51854 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "da1524a7-2756-4429-ada2-b1f493544bd2" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.370s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.297385] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961447, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.416882] env[68217]: DEBUG nova.network.neutron [req-098ae681-ff87-419a-b6d1-0c09eda4a3ac req-c00471aa-7b60-48eb-ba2e-0863411b2c77 service nova] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 890.448332] env[68217]: DEBUG nova.scheduler.client.report [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 890.482840] env[68217]: INFO nova.compute.manager [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Took 26.57 seconds to build instance. 
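Illustrative sketch (not part of the captured run): the "Acquiring lock ... by ...", "acquired ... waited Ns" and "released ... held Ns" triplets around these entries are emitted by oslo.concurrency's lockutils when Nova serializes work on named locks such as "compute_resources" and "refresh_cache-<uuid>". A minimal example of the two usage forms that produce those messages follows; the lock names and function are examples for illustration, not Nova's actual code.

    from oslo_concurrency import lockutils

    # Decorator form: the wrapped body runs only while the named in-process lock
    # is held, and lockutils logs how long the caller waited and how long it held it.
    @lockutils.synchronized('compute_resources')
    def update_usage():
        pass

    # Context-manager form, comparable to the refresh_cache-* locks seen above.
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        pass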
[ 890.511336] env[68217]: DEBUG nova.network.neutron [req-098ae681-ff87-419a-b6d1-0c09eda4a3ac req-c00471aa-7b60-48eb-ba2e-0863411b2c77 service nova] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.564961] env[68217]: INFO nova.compute.manager [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Took 23.69 seconds to build instance. [ 890.684432] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e0a9f89c-10b0-4eff-8f2a-8b90360bf1f2 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "da1524a7-2756-4429-ada2-b1f493544bd2" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.684432] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e0a9f89c-10b0-4eff-8f2a-8b90360bf1f2 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "da1524a7-2756-4429-ada2-b1f493544bd2" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.686260] env[68217]: DEBUG nova.network.neutron [req-633d4e89-9a25-4905-99cf-7307bc5f0ff8 req-e6a741ff-672b-41ef-84a3-e4a15f197a85 service nova] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Updated VIF entry in instance network info cache for port f10ba1e7-ec20-4ece-a5e6-c0e47e42e986. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 890.686579] env[68217]: DEBUG nova.network.neutron [req-633d4e89-9a25-4905-99cf-7307bc5f0ff8 req-e6a741ff-672b-41ef-84a3-e4a15f197a85 service nova] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Updating instance_info_cache with network_info: [{"id": "f10ba1e7-ec20-4ece-a5e6-c0e47e42e986", "address": "fa:16:3e:bd:30:8a", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": null, "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapf10ba1e7-ec", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.747542] env[68217]: DEBUG oslo_concurrency.lockutils [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Acquiring lock "e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.748047] env[68217]: DEBUG oslo_concurrency.lockutils [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Lock "e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.748313] env[68217]: DEBUG oslo_concurrency.lockutils [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Acquiring lock "e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 890.748547] env[68217]: DEBUG oslo_concurrency.lockutils [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Lock "e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.748783] env[68217]: DEBUG oslo_concurrency.lockutils [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Lock "e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.751061] env[68217]: INFO nova.compute.manager [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Terminating instance [ 890.787628] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb017ae9-8156-49f7-8eb4-846daa07368e tempest-ServersAdminTestJSON-2060402749 tempest-ServersAdminTestJSON-2060402749-project-admin] Acquiring lock "refresh_cache-ca9ef7ff-b942-4363-a4f8-9163791ec162" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.787820] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb017ae9-8156-49f7-8eb4-846daa07368e tempest-ServersAdminTestJSON-2060402749 tempest-ServersAdminTestJSON-2060402749-project-admin] Acquired lock "refresh_cache-ca9ef7ff-b942-4363-a4f8-9163791ec162" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 890.788059] env[68217]: DEBUG nova.network.neutron [None req-cb017ae9-8156-49f7-8eb4-846daa07368e tempest-ServersAdminTestJSON-2060402749 tempest-ServersAdminTestJSON-2060402749-project-admin] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 890.803463] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961447, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.953504] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.595s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 890.954148] env[68217]: INFO nova.compute.manager [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Migrating [ 890.961091] env[68217]: DEBUG oslo_concurrency.lockutils [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 4.501s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 890.976352] env[68217]: WARNING oslo_messaging._drivers.amqpdriver [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. 
Increasing threshold to: 20 [ 890.986988] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9e5887c5-4d06-4458-9d66-708d079312d2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "ca9ef7ff-b942-4363-a4f8-9163791ec162" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.090s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.013656] env[68217]: DEBUG oslo_concurrency.lockutils [req-098ae681-ff87-419a-b6d1-0c09eda4a3ac req-c00471aa-7b60-48eb-ba2e-0863411b2c77 service nova] Releasing lock "refresh_cache-650ebd16-da81-475e-a82a-7fa5fb2880bc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.014227] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired lock "refresh_cache-650ebd16-da81-475e-a82a-7fa5fb2880bc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.014452] env[68217]: DEBUG nova.network.neutron [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 891.067435] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2ec070eb-5e81-4fa0-a098-2ce25dd82434 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "a7625a02-993b-4577-8d42-f763858a6154" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.208s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 891.191050] env[68217]: INFO nova.compute.manager [None req-e0a9f89c-10b0-4eff-8f2a-8b90360bf1f2 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Detaching volume 52fa266f-c8e9-4d71-a291-87ae7c831366 [ 891.193168] env[68217]: DEBUG oslo_concurrency.lockutils [req-633d4e89-9a25-4905-99cf-7307bc5f0ff8 req-e6a741ff-672b-41ef-84a3-e4a15f197a85 service nova] Releasing lock "refresh_cache-03d61c68-1b37-4172-b276-67a73a0dc228" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 891.230103] env[68217]: INFO nova.virt.block_device [None req-e0a9f89c-10b0-4eff-8f2a-8b90360bf1f2 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Attempting to driver detach volume 52fa266f-c8e9-4d71-a291-87ae7c831366 from mountpoint /dev/sdb [ 891.230351] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0a9f89c-10b0-4eff-8f2a-8b90360bf1f2 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Volume detach. 
Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 891.230538] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0a9f89c-10b0-4eff-8f2a-8b90360bf1f2 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594311', 'volume_id': '52fa266f-c8e9-4d71-a291-87ae7c831366', 'name': 'volume-52fa266f-c8e9-4d71-a291-87ae7c831366', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'da1524a7-2756-4429-ada2-b1f493544bd2', 'attached_at': '', 'detached_at': '', 'volume_id': '52fa266f-c8e9-4d71-a291-87ae7c831366', 'serial': '52fa266f-c8e9-4d71-a291-87ae7c831366'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 891.231608] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9034c5c-5eb8-45f4-af38-916ba6e200a1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.254236] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbaab608-ecbb-444e-b8fd-98e6727f14af {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.257427] env[68217]: DEBUG nova.compute.manager [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 891.257735] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 891.258361] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9287f94c-0760-45e6-a511-75277db380be {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.268446] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf30e91-8674-47b3-83bf-9360a364e017 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.271133] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 891.271476] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b4734aa7-4f06-4d96-9bd7-9701844c1f2f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.282347] env[68217]: DEBUG oslo_vmware.api [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Waiting for the task: (returnval){ [ 891.282347] env[68217]: value = "task-2961448" [ 891.282347] env[68217]: _type = "Task" [ 891.282347] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.311435] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549e7c93-b0a0-48bd-9820-1b7c8859a9c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.320026] env[68217]: DEBUG oslo_vmware.api [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961447, 'name': PowerOnVM_Task, 'duration_secs': 1.052252} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.333801] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 891.336035] env[68217]: INFO nova.compute.manager [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Rebuilding instance [ 891.338128] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0a9f89c-10b0-4eff-8f2a-8b90360bf1f2 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] The volume has not been displaced from its original location: [datastore1] volume-52fa266f-c8e9-4d71-a291-87ae7c831366/volume-52fa266f-c8e9-4d71-a291-87ae7c831366.vmdk. No consolidation needed. {{(pid=68217) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 891.342993] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0a9f89c-10b0-4eff-8f2a-8b90360bf1f2 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Reconfiguring VM instance instance-00000024 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 891.343342] env[68217]: DEBUG oslo_vmware.api [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': task-2961448, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.343919] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-474e7022-c892-4690-b12a-a9454bf666c6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.368866] env[68217]: DEBUG oslo_vmware.api [None req-e0a9f89c-10b0-4eff-8f2a-8b90360bf1f2 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 891.368866] env[68217]: value = "task-2961449" [ 891.368866] env[68217]: _type = "Task" [ 891.368866] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.381101] env[68217]: DEBUG oslo_vmware.api [None req-e0a9f89c-10b0-4eff-8f2a-8b90360bf1f2 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961449, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.410538] env[68217]: DEBUG nova.compute.manager [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 891.411393] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e85382c9-81f6-47e9-bc02-2559fa3d5d03 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.483682] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "refresh_cache-fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.484043] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquired lock "refresh_cache-fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 891.484267] env[68217]: DEBUG nova.network.neutron [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 891.486641] env[68217]: DEBUG nova.compute.manager [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 891.488148] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c508b078-041a-417b-9eca-2b930dc4177c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.567525] env[68217]: DEBUG nova.network.neutron [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 891.789073] env[68217]: DEBUG nova.network.neutron [None req-cb017ae9-8156-49f7-8eb4-846daa07368e tempest-ServersAdminTestJSON-2060402749 tempest-ServersAdminTestJSON-2060402749-project-admin] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Updating instance_info_cache with network_info: [{"id": "5002fb09-ddc6-4497-a55f-8cfe415c4d70", "address": "fa:16:3e:5b:2a:7a", "network": {"id": "1800eb48-065a-40ff-aa65-26727eaad0cb", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-419697147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9ba7843e6144cd1877b48bc40cd64f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5002fb09-dd", "ovs_interfaceid": "5002fb09-ddc6-4497-a55f-8cfe415c4d70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.812719] env[68217]: DEBUG oslo_vmware.api [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': task-2961448, 'name': PowerOffVM_Task, 'duration_secs': 0.215751} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.812994] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 891.813185] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 891.814199] env[68217]: DEBUG nova.network.neutron [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Updating instance_info_cache with network_info: [{"id": "a2616d12-5ede-48a3-8191-248418505394", "address": "fa:16:3e:91:fe:23", "network": {"id": "9e8fc690-e99b-4d81-a587-00584139fd8d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1676314619-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebfeb38b81794c558c1164cecd7fa221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2616d12-5e", "ovs_interfaceid": "a2616d12-5ede-48a3-8191-248418505394", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.815691] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-af360efa-9ed8-41f6-9f8b-8b351fdbab87 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.882891] env[68217]: DEBUG oslo_vmware.api [None req-e0a9f89c-10b0-4eff-8f2a-8b90360bf1f2 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961449, 'name': ReconfigVM_Task, 'duration_secs': 0.364328} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.886121] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0a9f89c-10b0-4eff-8f2a-8b90360bf1f2 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Reconfigured VM instance instance-00000024 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 891.893176] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc8d4df0-075b-4c16-9f47-34beced891b8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.908111] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 891.908111] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 891.908111] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Deleting the datastore file [datastore2] e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 891.910613] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9b92a88-61eb-465e-ade5-7322fb5390f9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.916359] env[68217]: DEBUG oslo_vmware.api [None req-e0a9f89c-10b0-4eff-8f2a-8b90360bf1f2 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 891.916359] env[68217]: value = "task-2961451" [ 891.916359] env[68217]: _type = "Task" [ 891.916359] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.921710] env[68217]: DEBUG oslo_vmware.api [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Waiting for the task: (returnval){ [ 891.921710] env[68217]: value = "task-2961452" [ 891.921710] env[68217]: _type = "Task" [ 891.921710] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.931966] env[68217]: DEBUG oslo_vmware.api [None req-e0a9f89c-10b0-4eff-8f2a-8b90360bf1f2 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961451, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.946166] env[68217]: DEBUG oslo_vmware.api [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': task-2961452, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.967498] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-959a8404-852f-4ff2-988e-8b540fa19693 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.982481] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8673a5df-9cf0-493c-afc7-9def9bb4942c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.023932] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee7608e-e681-4d3c-851d-d6b4a37eca07 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.030261] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9cb88d92-8d65-4d82-b5c7-73e3ddaaa643 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "58c15727-79ae-404f-a054-d71e3be498cc" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 31.239s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.036151] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54ea825-2234-493a-a03f-69c7f055ec72 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.051206] env[68217]: DEBUG nova.compute.provider_tree [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 892.291711] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb017ae9-8156-49f7-8eb4-846daa07368e tempest-ServersAdminTestJSON-2060402749 tempest-ServersAdminTestJSON-2060402749-project-admin] Releasing lock "refresh_cache-ca9ef7ff-b942-4363-a4f8-9163791ec162" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.292033] env[68217]: DEBUG nova.compute.manager [None req-cb017ae9-8156-49f7-8eb4-846daa07368e tempest-ServersAdminTestJSON-2060402749 tempest-ServersAdminTestJSON-2060402749-project-admin] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Inject network info {{(pid=68217) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7737}} [ 892.293625] env[68217]: DEBUG nova.compute.manager [None req-cb017ae9-8156-49f7-8eb4-846daa07368e tempest-ServersAdminTestJSON-2060402749 tempest-ServersAdminTestJSON-2060402749-project-admin] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] network_info to inject: |[{"id": "5002fb09-ddc6-4497-a55f-8cfe415c4d70", "address": "fa:16:3e:5b:2a:7a", "network": {"id": "1800eb48-065a-40ff-aa65-26727eaad0cb", 
"bridge": "br-int", "label": "tempest-ServersAdminTestJSON-419697147-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c9ba7843e6144cd1877b48bc40cd64f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1323cb03-8367-485a-962e-131af8eba474", "external-id": "nsx-vlan-transportzone-41", "segmentation_id": 41, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5002fb09-dd", "ovs_interfaceid": "5002fb09-ddc6-4497-a55f-8cfe415c4d70", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7738}} [ 892.298211] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cb017ae9-8156-49f7-8eb4-846daa07368e tempest-ServersAdminTestJSON-2060402749 tempest-ServersAdminTestJSON-2060402749-project-admin] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Reconfiguring VM instance to set the machine id {{(pid=68217) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 892.298211] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d575eed-c910-4c0c-8c85-6e18f83c1c50 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.315721] env[68217]: DEBUG oslo_vmware.api [None req-cb017ae9-8156-49f7-8eb4-846daa07368e tempest-ServersAdminTestJSON-2060402749 tempest-ServersAdminTestJSON-2060402749-project-admin] Waiting for the task: (returnval){ [ 892.315721] env[68217]: value = "task-2961453" [ 892.315721] env[68217]: _type = "Task" [ 892.315721] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.318813] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Releasing lock "refresh_cache-650ebd16-da81-475e-a82a-7fa5fb2880bc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.319176] env[68217]: DEBUG nova.compute.manager [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Instance network_info: |[{"id": "a2616d12-5ede-48a3-8191-248418505394", "address": "fa:16:3e:91:fe:23", "network": {"id": "9e8fc690-e99b-4d81-a587-00584139fd8d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1676314619-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebfeb38b81794c558c1164cecd7fa221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa2616d12-5e", "ovs_interfaceid": "a2616d12-5ede-48a3-8191-248418505394", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 892.319608] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:91:fe:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5a629f-6902-4d30-9278-74b443a8371d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a2616d12-5ede-48a3-8191-248418505394', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 892.329347] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 892.331044] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 892.331366] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b2796185-c9bb-4c2b-b426-e4023959cc5e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.354427] env[68217]: DEBUG oslo_vmware.api [None req-cb017ae9-8156-49f7-8eb4-846daa07368e tempest-ServersAdminTestJSON-2060402749 tempest-ServersAdminTestJSON-2060402749-project-admin] Task: {'id': task-2961453, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.365283] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 892.365283] env[68217]: value = "task-2961454" [ 892.365283] env[68217]: _type = "Task" [ 892.365283] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.374505] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961454, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.378181] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "a7625a02-993b-4577-8d42-f763858a6154" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.378904] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "a7625a02-993b-4577-8d42-f763858a6154" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.378904] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "a7625a02-993b-4577-8d42-f763858a6154-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 892.378904] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "a7625a02-993b-4577-8d42-f763858a6154-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 892.379160] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "a7625a02-993b-4577-8d42-f763858a6154-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 892.383135] env[68217]: INFO nova.compute.manager [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Terminating instance [ 892.394606] env[68217]: DEBUG nova.network.neutron [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Updating instance_info_cache with network_info: [{"id": "747300c0-a758-483f-ba39-99efe6e731ec", "address": "fa:16:3e:2a:04:4a", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap747300c0-a7", "ovs_interfaceid": "747300c0-a758-483f-ba39-99efe6e731ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.432732] env[68217]: DEBUG oslo_vmware.api [None req-e0a9f89c-10b0-4eff-8f2a-8b90360bf1f2 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961451, 'name': ReconfigVM_Task, 'duration_secs': 0.18227} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.433584] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0a9f89c-10b0-4eff-8f2a-8b90360bf1f2 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594311', 'volume_id': '52fa266f-c8e9-4d71-a291-87ae7c831366', 'name': 'volume-52fa266f-c8e9-4d71-a291-87ae7c831366', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'da1524a7-2756-4429-ada2-b1f493544bd2', 'attached_at': '', 'detached_at': '', 'volume_id': '52fa266f-c8e9-4d71-a291-87ae7c831366', 'serial': '52fa266f-c8e9-4d71-a291-87ae7c831366'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 892.440228] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 892.440756] env[68217]: DEBUG oslo_vmware.api [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Task: {'id': task-2961452, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.157148} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.441246] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b3819e3-ba0d-49c1-b67e-0d10400ed4ab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.443212] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 892.443529] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 892.443919] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 892.444201] env[68217]: INFO nova.compute.manager [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Took 1.19 seconds to destroy the instance on the hypervisor. 
[ 892.444538] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 892.444904] env[68217]: DEBUG nova.compute.manager [-] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 892.445013] env[68217]: DEBUG nova.network.neutron [-] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 892.456477] env[68217]: DEBUG oslo_vmware.api [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Waiting for the task: (returnval){ [ 892.456477] env[68217]: value = "task-2961455" [ 892.456477] env[68217]: _type = "Task" [ 892.456477] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.474242] env[68217]: DEBUG oslo_vmware.api [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961455, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.555405] env[68217]: DEBUG nova.scheduler.client.report [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 892.829458] env[68217]: DEBUG oslo_vmware.api [None req-cb017ae9-8156-49f7-8eb4-846daa07368e tempest-ServersAdminTestJSON-2060402749 tempest-ServersAdminTestJSON-2060402749-project-admin] Task: {'id': task-2961453, 'name': ReconfigVM_Task, 'duration_secs': 0.202231} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.829771] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cb017ae9-8156-49f7-8eb4-846daa07368e tempest-ServersAdminTestJSON-2060402749 tempest-ServersAdminTestJSON-2060402749-project-admin] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Reconfigured VM instance to set the machine id {{(pid=68217) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 892.879987] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961454, 'name': CreateVM_Task, 'duration_secs': 0.463737} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.879987] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 892.879987] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9f4edacb-625d-403d-beb9-916f1ffd1cd7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.879987] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9f4edacb-625d-403d-beb9-916f1ffd1cd7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 892.880146] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/9f4edacb-625d-403d-beb9-916f1ffd1cd7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 892.880538] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-485dec60-94cf-40bb-a010-3f37220fa624 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.888299] env[68217]: DEBUG nova.compute.manager [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 892.888299] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 892.888591] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 892.888591] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522a10ef-56cd-e71b-cca8-ff2bc9713dc2" [ 892.888591] env[68217]: _type = "Task" [ 892.888591] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.889716] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bfcc897-8707-49d6-a756-68630a90e49a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.898856] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Releasing lock "refresh_cache-fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 892.906221] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 892.906576] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8a53ffa8-9aa0-4690-94a1-7c1332cfb15f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.917329] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522a10ef-56cd-e71b-cca8-ff2bc9713dc2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.919625] env[68217]: DEBUG oslo_vmware.api [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 892.919625] env[68217]: value = "task-2961456" [ 892.919625] env[68217]: _type = "Task" [ 892.919625] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.931831] env[68217]: DEBUG oslo_vmware.api [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961456, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.970607] env[68217]: DEBUG oslo_vmware.api [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961455, 'name': PowerOffVM_Task, 'duration_secs': 0.291712} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.970915] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 892.971700] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 892.971914] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-52ea7e36-87d9-4533-892d-67af9c35d19c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.980129] env[68217]: DEBUG oslo_vmware.api [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Waiting for the task: (returnval){ [ 892.980129] env[68217]: value = "task-2961457" [ 892.980129] env[68217]: _type = "Task" [ 892.980129] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.990470] env[68217]: DEBUG oslo_vmware.api [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961457, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.035598] env[68217]: DEBUG nova.objects.instance [None req-e0a9f89c-10b0-4eff-8f2a-8b90360bf1f2 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lazy-loading 'flavor' on Instance uuid da1524a7-2756-4429-ada2-b1f493544bd2 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 893.281126] env[68217]: DEBUG nova.compute.manager [req-8a9f5cb6-ed98-4149-b13b-09395169cde2 req-28b9eed5-1bb5-4b71-a30a-37a11b927d7e service nova] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Received event network-vif-deleted-b687a815-30c5-4ac1-aed3-a25a04a96474 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 893.281264] env[68217]: INFO nova.compute.manager [req-8a9f5cb6-ed98-4149-b13b-09395169cde2 req-28b9eed5-1bb5-4b71-a30a-37a11b927d7e service nova] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Neutron deleted interface b687a815-30c5-4ac1-aed3-a25a04a96474; detaching it from the instance and deleting it from the info cache [ 893.281440] env[68217]: DEBUG nova.network.neutron [req-8a9f5cb6-ed98-4149-b13b-09395169cde2 req-28b9eed5-1bb5-4b71-a30a-37a11b927d7e service nova] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 893.410248] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9f4edacb-625d-403d-beb9-916f1ffd1cd7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 893.410248] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Processing image 9f4edacb-625d-403d-beb9-916f1ffd1cd7 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 893.410248] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/9f4edacb-625d-403d-beb9-916f1ffd1cd7/9f4edacb-625d-403d-beb9-916f1ffd1cd7.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.410248] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/9f4edacb-625d-403d-beb9-916f1ffd1cd7/9f4edacb-625d-403d-beb9-916f1ffd1cd7.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 893.410248] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 893.411648] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-158e15b2-031b-4607-8e5b-5a2fe79cf826 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.428040] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 893.428291] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 893.430285] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a713834-43ee-4146-bf6b-2130eaf6f6e5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.437918] env[68217]: DEBUG oslo_vmware.api [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961456, 'name': PowerOffVM_Task, 'duration_secs': 0.236608} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.438554] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 893.438734] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 893.438981] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-718e84a9-08fc-4a48-8635-3b73c58682a8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.441866] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 893.441866] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5261bbca-24f7-b41a-e05f-fe9a48844f1c" [ 893.441866] env[68217]: _type = "Task" [ 893.441866] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.453058] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5261bbca-24f7-b41a-e05f-fe9a48844f1c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.492993] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] VM already powered off {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 893.492993] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Volume detach. Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 893.492993] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594242', 'volume_id': '19dc7d9f-d50d-45f0-8776-4c28a20691ad', 'name': 'volume-19dc7d9f-d50d-45f0-8776-4c28a20691ad', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95e625e9-a726-4c3c-be66-7b8ce93b5f8a', 'attached_at': '', 'detached_at': '', 'volume_id': '19dc7d9f-d50d-45f0-8776-4c28a20691ad', 'serial': '19dc7d9f-d50d-45f0-8776-4c28a20691ad'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 893.493210] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a481d98-a3c0-46b5-8ed6-090f7f99060a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.516221] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7293fc4d-c7e4-47af-978d-48feaaf8dffd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.520655] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 893.520655] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 893.520655] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleting the datastore file [datastore1] a7625a02-993b-4577-8d42-f763858a6154 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 893.520655] env[68217]: DEBUG nova.network.neutron [-] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 893.520901] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-549eaf94-02a0-40a9-8f30-2dc077189fac {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.528667] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-596202c2-0088-4e55-a32f-c996f9ad78d1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.533700] env[68217]: DEBUG oslo_vmware.api [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 893.533700] env[68217]: value = "task-2961459" [ 893.533700] env[68217]: _type = "Task" [ 893.533700] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.557836] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e31b03-1b82-438b-be6e-3235949e86b1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.565363] env[68217]: DEBUG oslo_vmware.api [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961459, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.566555] env[68217]: DEBUG oslo_concurrency.lockutils [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.606s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 893.572314] env[68217]: DEBUG oslo_concurrency.lockutils [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.397s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 893.572314] env[68217]: DEBUG nova.objects.instance [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lazy-loading 'resources' on Instance uuid e550084b-84dd-4ae8-8667-2edb45b49e2b {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 893.585553] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] The volume has not been displaced from its original location: [datastore1] volume-19dc7d9f-d50d-45f0-8776-4c28a20691ad/volume-19dc7d9f-d50d-45f0-8776-4c28a20691ad.vmdk. No consolidation needed. 
{{(pid=68217) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 893.592695] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Reconfiguring VM instance instance-0000003e to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 893.597085] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12fdefe9-e383-45b9-a366-77522296c4cc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.619914] env[68217]: DEBUG oslo_vmware.api [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Waiting for the task: (returnval){ [ 893.619914] env[68217]: value = "task-2961460" [ 893.619914] env[68217]: _type = "Task" [ 893.619914] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.642269] env[68217]: DEBUG oslo_vmware.api [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961460, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.784718] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9b2cf004-b272-431d-9ca0-2808f850e54e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.797406] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892c3508-7282-4c60-88e4-1c0022a43f13 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.847179] env[68217]: DEBUG nova.compute.manager [req-8a9f5cb6-ed98-4149-b13b-09395169cde2 req-28b9eed5-1bb5-4b71-a30a-37a11b927d7e service nova] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Detach interface failed, port_id=b687a815-30c5-4ac1-aed3-a25a04a96474, reason: Instance e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2 could not be found. 
{{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 893.955659] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Preparing fetch location {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 893.956076] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Fetch image to [datastore2] OSTACK_IMG_c755a21b-9d8f-4515-afeb-92512cfdde74/OSTACK_IMG_c755a21b-9d8f-4515-afeb-92512cfdde74.vmdk {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 893.956386] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Downloading stream optimized image 9f4edacb-625d-403d-beb9-916f1ffd1cd7 to [datastore2] OSTACK_IMG_c755a21b-9d8f-4515-afeb-92512cfdde74/OSTACK_IMG_c755a21b-9d8f-4515-afeb-92512cfdde74.vmdk on the data store datastore2 as vApp {{(pid=68217) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 893.956722] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Downloading image file data 9f4edacb-625d-403d-beb9-916f1ffd1cd7 to the ESX as VM named 'OSTACK_IMG_c755a21b-9d8f-4515-afeb-92512cfdde74' {{(pid=68217) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 894.024932] env[68217]: INFO nova.compute.manager [-] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Took 1.58 seconds to deallocate network for instance. [ 894.055117] env[68217]: DEBUG oslo_vmware.api [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961459, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.167259} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.056474] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 894.056474] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 894.056474] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 894.056474] env[68217]: INFO nova.compute.manager [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: a7625a02-993b-4577-8d42-f763858a6154] Took 1.17 seconds to destroy the instance on the hypervisor. [ 894.056617] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 894.056718] env[68217]: DEBUG nova.compute.manager [-] [instance: a7625a02-993b-4577-8d42-f763858a6154] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 894.056802] env[68217]: DEBUG nova.network.neutron [-] [instance: a7625a02-993b-4577-8d42-f763858a6154] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 894.059859] env[68217]: DEBUG oslo_vmware.rw_handles [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 894.059859] env[68217]: value = "resgroup-9" [ 894.059859] env[68217]: _type = "ResourcePool" [ 894.059859] env[68217]: }. 
{{(pid=68217) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 894.060103] env[68217]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-78afce6d-12ac-4ad2-9e4f-bce36f5541c5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.078713] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e0a9f89c-10b0-4eff-8f2a-8b90360bf1f2 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "da1524a7-2756-4429-ada2-b1f493544bd2" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.394s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.088419] env[68217]: DEBUG oslo_vmware.rw_handles [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lease: (returnval){ [ 894.088419] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fb6e13-d653-c52f-8f87-040d471822be" [ 894.088419] env[68217]: _type = "HttpNfcLease" [ 894.088419] env[68217]: } obtained for vApp import into resource pool (val){ [ 894.088419] env[68217]: value = "resgroup-9" [ 894.088419] env[68217]: _type = "ResourcePool" [ 894.088419] env[68217]: }. {{(pid=68217) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 894.088704] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the lease: (returnval){ [ 894.088704] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fb6e13-d653-c52f-8f87-040d471822be" [ 894.088704] env[68217]: _type = "HttpNfcLease" [ 894.088704] env[68217]: } to be ready. {{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 894.102278] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 894.102278] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fb6e13-d653-c52f-8f87-040d471822be" [ 894.102278] env[68217]: _type = "HttpNfcLease" [ 894.102278] env[68217]: } is initializing. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 894.138961] env[68217]: DEBUG oslo_vmware.api [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961460, 'name': ReconfigVM_Task, 'duration_secs': 0.269838} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.143414] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Reconfigured VM instance instance-0000003e to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 894.151132] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca2fb60d-d75c-4b6d-804c-bdc4d0dd5298 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.171324] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ab63726-c610-4e2a-9deb-d80973798fa9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.183569] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57a8f646-f6b7-45e4-a3d8-5e6345011100 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.187320] env[68217]: DEBUG oslo_vmware.api [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Waiting for the task: (returnval){ [ 894.187320] env[68217]: value = "task-2961462" [ 894.187320] env[68217]: _type = "Task" [ 894.187320] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.188303] env[68217]: INFO nova.scheduler.client.report [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleted allocation for migration 09e5617b-3f61-4244-8c01-a6a0f8233b59 [ 894.231150] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d72be7e-1f7c-415e-b6f0-03ae81fd1685 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.237902] env[68217]: DEBUG oslo_vmware.api [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961462, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.243736] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc070bc3-c057-4ca7-ad11-e097a88f4f1d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.258876] env[68217]: DEBUG nova.compute.provider_tree [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 894.421954] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a1d7e3b-a985-4f7e-94c5-d29da767e525 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.448532] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Updating instance 'fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb' progress to 0 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 894.453414] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6fd9503-9edf-405f-86f1-397e340e1493 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.461528] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-afda133c-f538-43ec-8d3b-ac1af411005c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Suspending the VM {{(pid=68217) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 894.461801] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-faedde08-73d2-42a1-83aa-0cc105047e4a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.471077] env[68217]: DEBUG oslo_vmware.api [None req-afda133c-f538-43ec-8d3b-ac1af411005c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 894.471077] env[68217]: value = "task-2961463" [ 894.471077] env[68217]: _type = "Task" [ 894.471077] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.480719] env[68217]: DEBUG oslo_vmware.api [None req-afda133c-f538-43ec-8d3b-ac1af411005c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961463, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.532896] env[68217]: DEBUG oslo_concurrency.lockutils [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 894.599147] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 894.599147] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fb6e13-d653-c52f-8f87-040d471822be" [ 894.599147] env[68217]: _type = "HttpNfcLease" [ 894.599147] env[68217]: } is initializing. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 894.625976] env[68217]: INFO nova.compute.manager [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Rebuilding instance [ 894.701634] env[68217]: DEBUG nova.compute.manager [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 894.702607] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d4f39e0-5ccb-4a49-87b3-a2d1e26bd582 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.708889] env[68217]: DEBUG oslo_vmware.api [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961462, 'name': ReconfigVM_Task, 'duration_secs': 0.153989} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.709722] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594242', 'volume_id': '19dc7d9f-d50d-45f0-8776-4c28a20691ad', 'name': 'volume-19dc7d9f-d50d-45f0-8776-4c28a20691ad', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95e625e9-a726-4c3c-be66-7b8ce93b5f8a', 'attached_at': '', 'detached_at': '', 'volume_id': '19dc7d9f-d50d-45f0-8776-4c28a20691ad', 'serial': '19dc7d9f-d50d-45f0-8776-4c28a20691ad'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 894.710422] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 894.711363] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567e1c88-2b2a-48ae-9aec-0a472f3ec71c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.730930] env[68217]: DEBUG oslo_concurrency.lockutils [None req-05fc0e64-da2a-4aa3-990d-0b38206a303a tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "2612f6fc-a43f-4011-8a09-51088a49371a" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 12.021s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 894.732203] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 894.733028] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e7dead8-7f1f-4f7c-993e-bc3700610940 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.763316] env[68217]: DEBUG nova.scheduler.client.report [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 894.840874] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f 
tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 894.840874] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 894.840874] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Deleting the datastore file [datastore1] 95e625e9-a726-4c3c-be66-7b8ce93b5f8a {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 894.840874] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ba5e1369-97ed-4ee1-abe5-5983abad19c5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.851694] env[68217]: DEBUG oslo_vmware.api [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Waiting for the task: (returnval){ [ 894.851694] env[68217]: value = "task-2961465" [ 894.851694] env[68217]: _type = "Task" [ 894.851694] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.859956] env[68217]: DEBUG oslo_vmware.api [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961465, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.912832] env[68217]: DEBUG nova.network.neutron [-] [instance: a7625a02-993b-4577-8d42-f763858a6154] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.958432] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 894.958682] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-31a3e916-51fc-44f5-a7c3-adf08818e454 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.967801] env[68217]: DEBUG oslo_vmware.api [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 894.967801] env[68217]: value = "task-2961466" [ 894.967801] env[68217]: _type = "Task" [ 894.967801] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.981802] env[68217]: DEBUG oslo_vmware.api [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961466, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.985780] env[68217]: DEBUG oslo_vmware.api [None req-afda133c-f538-43ec-8d3b-ac1af411005c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961463, 'name': SuspendVM_Task} progress is 58%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.105346] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 895.105346] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fb6e13-d653-c52f-8f87-040d471822be" [ 895.105346] env[68217]: _type = "HttpNfcLease" [ 895.105346] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 895.105920] env[68217]: DEBUG oslo_vmware.rw_handles [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 895.105920] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fb6e13-d653-c52f-8f87-040d471822be" [ 895.105920] env[68217]: _type = "HttpNfcLease" [ 895.105920] env[68217]: }. {{(pid=68217) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 895.106827] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-534e74a9-f04f-4df9-aa1e-32d07f5f8c25 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.116509] env[68217]: DEBUG oslo_vmware.rw_handles [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528ff532-1505-1232-e54e-ef100289a9a3/disk-0.vmdk from lease info. {{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 895.116765] env[68217]: DEBUG oslo_vmware.rw_handles [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528ff532-1505-1232-e54e-ef100289a9a3/disk-0.vmdk. 
{{(pid=68217) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 895.184800] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-4676f66b-eb5d-4fd9-a848-0178e43beb6d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.270998] env[68217]: DEBUG oslo_concurrency.lockutils [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.700s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.273359] env[68217]: DEBUG oslo_concurrency.lockutils [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.451s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.273946] env[68217]: DEBUG nova.objects.instance [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lazy-loading 'resources' on Instance uuid 03d61c68-1b37-4172-b276-67a73a0dc228 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 895.313269] env[68217]: INFO nova.scheduler.client.report [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Deleted allocations for instance e550084b-84dd-4ae8-8667-2edb45b49e2b [ 895.351452] env[68217]: DEBUG nova.compute.manager [req-2816d12d-d98a-43c3-9645-c2be9c5be555 req-d74af39f-0860-4d30-9720-2ba6c05eb44e service nova] [instance: a7625a02-993b-4577-8d42-f763858a6154] Received event network-vif-deleted-4f18a3c9-df30-45b6-9176-4c99f80d5bad {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 895.364675] env[68217]: DEBUG oslo_vmware.api [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Task: {'id': task-2961465, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090338} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.365616] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 895.365819] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 895.366042] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 895.391489] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "7371d4d3-e255-4a1f-8d5f-2ee1297e89d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 895.392866] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "7371d4d3-e255-4a1f-8d5f-2ee1297e89d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 895.415811] env[68217]: INFO nova.compute.manager [-] [instance: a7625a02-993b-4577-8d42-f763858a6154] Took 1.36 seconds to deallocate network for instance. [ 895.439616] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Volume detach. 
Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 895.439972] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-73db040c-f965-409f-a126-c3a69219ff6d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.451344] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-243632e3-2366-4cdc-97d7-d4e154e7148a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.496950] env[68217]: ERROR nova.compute.manager [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Failed to detach volume 19dc7d9f-d50d-45f0-8776-4c28a20691ad from /dev/sda: nova.exception.InstanceNotFound: Instance 95e625e9-a726-4c3c-be66-7b8ce93b5f8a could not be found. [ 895.496950] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Traceback (most recent call last): [ 895.496950] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 895.496950] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] self.driver.rebuild(**kwargs) [ 895.496950] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 895.496950] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] raise NotImplementedError() [ 895.496950] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] NotImplementedError [ 895.496950] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] [ 895.496950] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] During handling of the above exception, another exception occurred: [ 895.496950] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] [ 895.496950] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Traceback (most recent call last): [ 895.496950] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 895.496950] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] self.driver.detach_volume(context, old_connection_info, [ 895.497617] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 895.497617] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] return self._volumeops.detach_volume(connection_info, instance) [ 895.497617] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 895.497617] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] self._detach_volume_vmdk(connection_info, instance) [ 895.497617] env[68217]: ERROR nova.compute.manager [instance: 
95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 895.497617] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 895.497617] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 895.497617] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] stable_ref.fetch_moref(session) [ 895.497617] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 895.497617] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] raise exception.InstanceNotFound(instance_id=self._uuid) [ 895.497617] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] nova.exception.InstanceNotFound: Instance 95e625e9-a726-4c3c-be66-7b8ce93b5f8a could not be found. [ 895.497617] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] [ 895.510239] env[68217]: DEBUG oslo_vmware.api [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961466, 'name': PowerOffVM_Task, 'duration_secs': 0.33654} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.510239] env[68217]: DEBUG oslo_vmware.api [None req-afda133c-f538-43ec-8d3b-ac1af411005c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961463, 'name': SuspendVM_Task, 'duration_secs': 1.00138} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.510239] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 895.510239] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Updating instance 'fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb' progress to 17 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 895.513245] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-afda133c-f538-43ec-8d3b-ac1af411005c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Suspended the VM {{(pid=68217) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 895.513505] env[68217]: DEBUG nova.compute.manager [None req-afda133c-f538-43ec-8d3b-ac1af411005c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 895.515028] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20f26b04-f746-4120-a6cc-2e6c27dd562d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.674167] env[68217]: DEBUG nova.compute.utils [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Build of instance 95e625e9-a726-4c3c-be66-7b8ce93b5f8a aborted: Failed to rebuild volume backed instance. {{(pid=68217) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:431}} [ 895.677477] env[68217]: ERROR nova.compute.manager [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance 95e625e9-a726-4c3c-be66-7b8ce93b5f8a aborted: Failed to rebuild volume backed instance. 
[ 895.677477] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Traceback (most recent call last): [ 895.677477] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/compute/manager.py", line 4185, in _do_rebuild_instance [ 895.677477] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] self.driver.rebuild(**kwargs) [ 895.677477] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/virt/driver.py", line 497, in rebuild [ 895.677477] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] raise NotImplementedError() [ 895.677477] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] NotImplementedError [ 895.677477] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] [ 895.677477] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] During handling of the above exception, another exception occurred: [ 895.677477] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] [ 895.677477] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Traceback (most recent call last): [ 895.677477] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/compute/manager.py", line 3643, in _rebuild_volume_backed_instance [ 895.677477] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] self._detach_root_volume(context, instance, root_bdm) [ 895.677883] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/compute/manager.py", line 3622, in _detach_root_volume [ 895.677883] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] with excutils.save_and_reraise_exception(): [ 895.677883] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 895.677883] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] self.force_reraise() [ 895.677883] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 895.677883] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] raise self.value [ 895.677883] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/compute/manager.py", line 3608, in _detach_root_volume [ 895.677883] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] self.driver.detach_volume(context, old_connection_info, [ 895.677883] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 561, in detach_volume [ 895.677883] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] return self._volumeops.detach_volume(connection_info, instance) [ 895.677883] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File 
"/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 895.677883] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] self._detach_volume_vmdk(connection_info, instance) [ 895.678221] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 895.678221] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 895.678221] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 895.678221] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] stable_ref.fetch_moref(session) [ 895.678221] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 895.678221] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] raise exception.InstanceNotFound(instance_id=self._uuid) [ 895.678221] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] nova.exception.InstanceNotFound: Instance 95e625e9-a726-4c3c-be66-7b8ce93b5f8a could not be found. [ 895.678221] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] [ 895.678221] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] During handling of the above exception, another exception occurred: [ 895.678221] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] [ 895.678221] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Traceback (most recent call last): [ 895.678221] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/compute/manager.py", line 11471, in _error_out_instance_on_exception [ 895.678221] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] yield [ 895.678221] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/compute/manager.py", line 3911, in rebuild_instance [ 895.678604] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] self._do_rebuild_instance_with_claim( [ 895.678604] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/compute/manager.py", line 3997, in _do_rebuild_instance_with_claim [ 895.678604] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] self._do_rebuild_instance( [ 895.678604] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/compute/manager.py", line 4189, in _do_rebuild_instance [ 895.678604] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] self._rebuild_default_impl(**kwargs) [ 895.678604] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/compute/manager.py", line 3766, in _rebuild_default_impl [ 895.678604] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] self._rebuild_volume_backed_instance( [ 
895.678604] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] File "/opt/stack/nova/nova/compute/manager.py", line 3658, in _rebuild_volume_backed_instance [ 895.678604] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] raise exception.BuildAbortException( [ 895.678604] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] nova.exception.BuildAbortException: Build of instance 95e625e9-a726-4c3c-be66-7b8ce93b5f8a aborted: Failed to rebuild volume backed instance. [ 895.678604] env[68217]: ERROR nova.compute.manager [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] [ 895.727456] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 895.727822] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3980a787-bae9-4306-bc84-b555a9bea74c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.740871] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 895.740871] env[68217]: value = "task-2961467" [ 895.740871] env[68217]: _type = "Task" [ 895.740871] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.752045] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961467, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.776279] env[68217]: DEBUG nova.objects.instance [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lazy-loading 'numa_topology' on Instance uuid 03d61c68-1b37-4172-b276-67a73a0dc228 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 895.821471] env[68217]: DEBUG oslo_concurrency.lockutils [None req-bfbbc515-851a-4b17-910f-f45be65d8415 tempest-ServerRescueTestJSON-1280874011 tempest-ServerRescueTestJSON-1280874011-project-member] Lock "e550084b-84dd-4ae8-8667-2edb45b49e2b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.271s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 895.897320] env[68217]: DEBUG nova.compute.manager [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 895.923600] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.017156] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 896.017478] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 896.017656] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 896.017897] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 896.018093] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 896.018282] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 896.018523] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 896.018715] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 
tempest-MigrationsAdminTest-1132531208-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 896.018915] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 896.019151] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 896.019350] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 896.030560] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e7adf01-25e4-4ab1-922a-9c391ef4329e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.057882] env[68217]: DEBUG oslo_vmware.api [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 896.057882] env[68217]: value = "task-2961468" [ 896.057882] env[68217]: _type = "Task" [ 896.057882] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.075524] env[68217]: DEBUG oslo_vmware.api [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961468, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.148038] env[68217]: DEBUG oslo_vmware.rw_handles [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Completed reading data from the image iterator. {{(pid=68217) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 896.148298] env[68217]: DEBUG oslo_vmware.rw_handles [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528ff532-1505-1232-e54e-ef100289a9a3/disk-0.vmdk. 
{{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 896.149563] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-798b7518-f768-470f-8cf7-738eef77215e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.159030] env[68217]: DEBUG oslo_vmware.rw_handles [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528ff532-1505-1232-e54e-ef100289a9a3/disk-0.vmdk is in state: ready. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 896.159030] env[68217]: DEBUG oslo_vmware.rw_handles [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528ff532-1505-1232-e54e-ef100289a9a3/disk-0.vmdk. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 896.159170] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-1ee0a9de-c8c4-468e-952e-0354fe342a2f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.254250] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961467, 'name': PowerOffVM_Task, 'duration_secs': 0.211761} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.257016] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 896.257016] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 896.257016] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7162913f-ef9b-405f-ab8f-40881b6614b3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.263732] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 896.264244] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31714a6b-7a14-41ae-846e-87ae61c79f59 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.279250] env[68217]: DEBUG nova.objects.base [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Object Instance<03d61c68-1b37-4172-b276-67a73a0dc228> lazy-loaded attributes: resources,numa_topology {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 896.353174] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 896.353174] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 896.353174] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Deleting the datastore file [datastore1] aa4b9cc8-d0dc-4a0b-9eec-dceace695df9 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 896.353174] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3bd3c6c8-e0f2-492c-a8ba-253113e4b16a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.364023] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 
tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 896.364023] env[68217]: value = "task-2961470" [ 896.364023] env[68217]: _type = "Task" [ 896.364023] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.375373] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961470, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.394953] env[68217]: DEBUG oslo_vmware.rw_handles [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/528ff532-1505-1232-e54e-ef100289a9a3/disk-0.vmdk. {{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 896.394953] env[68217]: INFO nova.virt.vmwareapi.images [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Downloaded image file data 9f4edacb-625d-403d-beb9-916f1ffd1cd7 [ 896.394953] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c3ebc49-486f-4ab5-8901-715a8e457560 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.422109] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e0e9f3cc-248c-42bc-8132-908fc9c731fb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.444743] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.453047] env[68217]: INFO nova.virt.vmwareapi.images [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] The imported VM was unregistered [ 896.458160] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Caching image {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 896.458160] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Creating directory with path [datastore2] devstack-image-cache_base/9f4edacb-625d-403d-beb9-916f1ffd1cd7 {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 896.458160] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a9d338dc-a8ef-4bea-9492-981563e5ce0a {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.468514] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Created directory with path [datastore2] devstack-image-cache_base/9f4edacb-625d-403d-beb9-916f1ffd1cd7 {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 896.468723] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_c755a21b-9d8f-4515-afeb-92512cfdde74/OSTACK_IMG_c755a21b-9d8f-4515-afeb-92512cfdde74.vmdk to [datastore2] devstack-image-cache_base/9f4edacb-625d-403d-beb9-916f1ffd1cd7/9f4edacb-625d-403d-beb9-916f1ffd1cd7.vmdk. {{(pid=68217) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 896.468986] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-6225a14d-9a84-4810-95f3-b4bd8a85e823 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.478070] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 896.478070] env[68217]: value = "task-2961472" [ 896.478070] env[68217]: _type = "Task" [ 896.478070] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.487357] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961472, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.570659] env[68217]: DEBUG oslo_vmware.api [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961468, 'name': ReconfigVM_Task, 'duration_secs': 0.277656} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.573397] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Updating instance 'fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb' progress to 33 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 896.640456] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "2612f6fc-a43f-4011-8a09-51088a49371a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.640771] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "2612f6fc-a43f-4011-8a09-51088a49371a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.641001] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "2612f6fc-a43f-4011-8a09-51088a49371a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 896.644218] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "2612f6fc-a43f-4011-8a09-51088a49371a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 896.644218] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "2612f6fc-a43f-4011-8a09-51088a49371a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 896.644218] env[68217]: INFO nova.compute.manager [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Terminating instance [ 896.757356] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e2990a-7585-4e31-a354-ee8e9030ae8c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.766539] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a0e704-5f9e-4f62-bff6-2df314aae89d {{(pid=68217) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.802059] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3971f444-6993-4986-9708-b9b97afc714a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.812035] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e887e8ce-e9aa-4407-bf38-3916bf4f3926 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.832492] env[68217]: DEBUG nova.compute.provider_tree [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 896.879793] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961470, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174337} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.880467] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 896.880467] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 896.880605] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 896.993398] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961472, 'name': MoveVirtualDisk_Task} progress is 21%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.080959] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:19:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='b6918665-ab7d-45a4-86f9-01de99934033',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-847543468',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 897.081882] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 897.081882] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 897.081882] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 897.082138] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 897.082293] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 897.082553] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 897.082901] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 897.083097] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] 
Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 897.083541] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 897.083646] env[68217]: DEBUG nova.virt.hardware [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 897.090058] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Reconfiguring VM instance instance-0000004a to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 897.090397] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b98aa0b5-2c1a-43dc-ba6f-d90134dce517 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.112669] env[68217]: DEBUG oslo_vmware.api [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 897.112669] env[68217]: value = "task-2961473" [ 897.112669] env[68217]: _type = "Task" [ 897.112669] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.123460] env[68217]: DEBUG oslo_vmware.api [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961473, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.152832] env[68217]: DEBUG nova.compute.manager [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 897.153118] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 897.154014] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5872b38c-36fa-4d53-9847-813721df654f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.165859] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 897.165859] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fefe53fb-71e9-4f39-9e5f-602c7be0eb76 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.174228] env[68217]: DEBUG oslo_vmware.api [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 897.174228] env[68217]: value = "task-2961474" [ 897.174228] env[68217]: _type = "Task" [ 897.174228] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.184788] env[68217]: DEBUG oslo_vmware.api [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961474, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.274193] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "35817c87-0c55-49bd-917a-59bd39de663c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.274473] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "35817c87-0c55-49bd-917a-59bd39de663c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.339386] env[68217]: DEBUG nova.scheduler.client.report [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 897.481387] env[68217]: INFO nova.compute.manager [None req-c216228c-a410-498e-902e-34101d8d2d7c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Resuming [ 897.482051] env[68217]: DEBUG nova.objects.instance [None req-c216228c-a410-498e-902e-34101d8d2d7c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lazy-loading 'flavor' on Instance uuid 58c15727-79ae-404f-a054-d71e3be498cc {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 897.495176] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961472, 'name': MoveVirtualDisk_Task} progress is 40%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.625946] env[68217]: DEBUG oslo_vmware.api [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961473, 'name': ReconfigVM_Task, 'duration_secs': 0.479519} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.626104] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Reconfigured VM instance instance-0000004a to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 897.627128] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba89e4ce-a43d-4e6c-83c9-5ba3e3f0e576 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.660832] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb/fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 897.666076] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-17b5bc9b-53f7-4506-911a-8f725e957b2f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.695043] env[68217]: DEBUG oslo_vmware.api [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961474, 'name': PowerOffVM_Task, 'duration_secs': 0.424754} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.697596] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 897.699015] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 897.699015] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 897.699302] env[68217]: DEBUG oslo_vmware.api [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 897.699302] env[68217]: value = "task-2961475" [ 897.699302] env[68217]: _type = "Task" [ 897.699302] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.700273] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-00c78a1f-6fc6-4f2c-8236-763429b0cedd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.714297] env[68217]: DEBUG oslo_vmware.api [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961475, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.777866] env[68217]: DEBUG nova.compute.manager [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 897.805233] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 897.805505] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 897.805774] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleting the datastore file [datastore2] 2612f6fc-a43f-4011-8a09-51088a49371a {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 897.806091] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b1d6576-1cce-46c5-b404-6396684d062d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.815611] env[68217]: DEBUG oslo_vmware.api [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 897.815611] env[68217]: value = "task-2961477" [ 897.815611] env[68217]: _type = "Task" [ 897.815611] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.825473] env[68217]: DEBUG oslo_vmware.api [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961477, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.845658] env[68217]: DEBUG oslo_concurrency.lockutils [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.572s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 897.848943] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.749s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 897.849269] env[68217]: DEBUG nova.objects.instance [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lazy-loading 'resources' on Instance uuid b7fe971e-353f-427c-896c-32f9de0d70e7 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 897.921743] env[68217]: DEBUG nova.virt.hardware [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 897.922027] env[68217]: DEBUG nova.virt.hardware [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 897.922203] env[68217]: DEBUG nova.virt.hardware [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 897.922547] env[68217]: DEBUG nova.virt.hardware [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 897.922896] env[68217]: DEBUG nova.virt.hardware [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 897.923192] env[68217]: DEBUG nova.virt.hardware [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 897.923582] env[68217]: DEBUG nova.virt.hardware [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 897.923650] env[68217]: DEBUG nova.virt.hardware [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 897.924023] env[68217]: DEBUG nova.virt.hardware [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 897.924263] env[68217]: DEBUG nova.virt.hardware [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 897.924525] env[68217]: DEBUG nova.virt.hardware [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 897.925636] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ad9ff7-6aef-4165-b364-0115e538d4d4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.937404] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e50786b9-760f-40b0-b33c-3cee530ea1c2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.955138] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:9d:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1323cb03-8367-485a-962e-131af8eba474', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '04299da0-d810-4014-b79f-1ac8a45e1a8f', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 897.963503] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 
tempest-ServersAdminTestJSON-673344979-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 897.963625] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 897.963861] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a86ad15-0514-49f6-806a-4b318baefe37 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.986178] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 897.986178] env[68217]: value = "task-2961478" [ 897.986178] env[68217]: _type = "Task" [ 897.986178] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 897.995247] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961472, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.001478] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961478, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.184603] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Acquiring lock "95e625e9-a726-4c3c-be66-7b8ce93b5f8a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.184922] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Lock "95e625e9-a726-4c3c-be66-7b8ce93b5f8a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.185138] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Acquiring lock "95e625e9-a726-4c3c-be66-7b8ce93b5f8a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.185403] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Lock "95e625e9-a726-4c3c-be66-7b8ce93b5f8a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.185610] env[68217]: DEBUG oslo_concurrency.lockutils [None 
req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Lock "95e625e9-a726-4c3c-be66-7b8ce93b5f8a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.187967] env[68217]: INFO nova.compute.manager [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Terminating instance [ 898.213274] env[68217]: DEBUG oslo_vmware.api [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961475, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.307765] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.329727] env[68217]: DEBUG oslo_vmware.api [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961477, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.357068] env[68217]: DEBUG oslo_concurrency.lockutils [None req-582ff291-17fa-4bb6-bd2f-ae3fc97cb0fb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "03d61c68-1b37-4172-b276-67a73a0dc228" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 30.484s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.359388] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5c83039-bb5e-493d-ad83-2851f0d457cc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "03d61c68-1b37-4172-b276-67a73a0dc228" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 8.300s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.359388] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5c83039-bb5e-493d-ad83-2851f0d457cc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "03d61c68-1b37-4172-b276-67a73a0dc228-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.359593] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5c83039-bb5e-493d-ad83-2851f0d457cc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "03d61c68-1b37-4172-b276-67a73a0dc228-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.359759] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5c83039-bb5e-493d-ad83-2851f0d457cc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "03d61c68-1b37-4172-b276-67a73a0dc228-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 898.361741] env[68217]: INFO nova.compute.manager [None req-a5c83039-bb5e-493d-ad83-2851f0d457cc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Terminating instance [ 898.497694] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961472, 'name': MoveVirtualDisk_Task} progress is 83%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.502933] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961478, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.695878] env[68217]: DEBUG nova.compute.manager [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 898.696683] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9ec153b1-2b2d-4690-84ff-2f3f136a8cc8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.710197] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced458f9-1ca1-4a1e-afd5-6aac2c60c238 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.728724] env[68217]: DEBUG oslo_vmware.api [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961475, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.758592] env[68217]: WARNING nova.virt.vmwareapi.driver [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance 95e625e9-a726-4c3c-be66-7b8ce93b5f8a could not be found. 
[ 898.758835] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 898.760239] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bbbbedb3-7635-44ae-86fb-d795e57148ae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.762695] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd4d2492-d147-4565-a66d-c07c52df836a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.773782] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7451ae80-949d-4a02-8960-a6bc01a1c04d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.780136] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c40160c-c580-46b4-9a40-9c54fb45c63a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.838791] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0af96de-da83-40bd-bd69-3a73686d842c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.841644] env[68217]: WARNING nova.virt.vmwareapi.vmops [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 95e625e9-a726-4c3c-be66-7b8ce93b5f8a could not be found. [ 898.841969] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 898.842215] env[68217]: INFO nova.compute.manager [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Took 0.15 seconds to destroy the instance on the hypervisor. [ 898.842450] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 898.842781] env[68217]: DEBUG nova.compute.manager [-] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 898.842896] env[68217]: DEBUG nova.network.neutron [-] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 898.855644] env[68217]: DEBUG oslo_vmware.api [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961477, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.857050] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a42b5f9-6158-43c5-b23f-9df82e6e7d6c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.875337] env[68217]: DEBUG nova.compute.manager [None req-a5c83039-bb5e-493d-ad83-2851f0d457cc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 898.875554] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c83039-bb5e-493d-ad83-2851f0d457cc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 898.876090] env[68217]: DEBUG nova.compute.provider_tree [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.877983] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-12139610-5c3d-4246-a06f-6309d7a4fd56 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.888631] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bf4643a-ab51-42eb-b660-5e33b97d6825 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.907402] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Acquiring lock "4366c94c-164d-4cb9-8f04-7f26db4c0d3c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 898.907640] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Lock "4366c94c-164d-4cb9-8f04-7f26db4c0d3c" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 898.933577] env[68217]: WARNING nova.virt.vmwareapi.vmops [None req-a5c83039-bb5e-493d-ad83-2851f0d457cc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 03d61c68-1b37-4172-b276-67a73a0dc228 could not be found. [ 898.933816] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a5c83039-bb5e-493d-ad83-2851f0d457cc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 898.934039] env[68217]: INFO nova.compute.manager [None req-a5c83039-bb5e-493d-ad83-2851f0d457cc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Took 0.06 seconds to destroy the instance on the hypervisor. [ 898.934397] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a5c83039-bb5e-493d-ad83-2851f0d457cc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 898.938642] env[68217]: DEBUG nova.compute.manager [-] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 898.938642] env[68217]: DEBUG nova.network.neutron [-] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 898.991918] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961472, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.490937} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.995176] env[68217]: INFO nova.virt.vmwareapi.ds_util [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_c755a21b-9d8f-4515-afeb-92512cfdde74/OSTACK_IMG_c755a21b-9d8f-4515-afeb-92512cfdde74.vmdk to [datastore2] devstack-image-cache_base/9f4edacb-625d-403d-beb9-916f1ffd1cd7/9f4edacb-625d-403d-beb9-916f1ffd1cd7.vmdk. 
[ 898.995377] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Cleaning up location [datastore2] OSTACK_IMG_c755a21b-9d8f-4515-afeb-92512cfdde74 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 898.995534] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_c755a21b-9d8f-4515-afeb-92512cfdde74 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 898.995788] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d9f22f15-ec8c-4cf9-9bc4-d610c4eefb1d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.003981] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961478, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.005590] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 899.005590] env[68217]: value = "task-2961479" [ 899.005590] env[68217]: _type = "Task" [ 899.005590] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.008847] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c216228c-a410-498e-902e-34101d8d2d7c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 899.008998] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c216228c-a410-498e-902e-34101d8d2d7c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquired lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 899.009176] env[68217]: DEBUG nova.network.neutron [None req-c216228c-a410-498e-902e-34101d8d2d7c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 899.015193] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961479, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.215958] env[68217]: DEBUG oslo_vmware.api [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961475, 'name': ReconfigVM_Task, 'duration_secs': 1.266936} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.216297] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Reconfigured VM instance instance-0000004a to attach disk [datastore2] fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb/fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 899.216909] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Updating instance 'fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb' progress to 50 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 899.330355] env[68217]: DEBUG oslo_vmware.api [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961477, 'name': DeleteDatastoreFile_Task, 'duration_secs': 1.173297} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.330586] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 899.330739] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 899.330954] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 899.331104] env[68217]: INFO nova.compute.manager [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Took 2.18 seconds to destroy the instance on the hypervisor. [ 899.331349] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 899.331541] env[68217]: DEBUG nova.compute.manager [-] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 899.331636] env[68217]: DEBUG nova.network.neutron [-] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 899.379848] env[68217]: DEBUG nova.scheduler.client.report [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 899.409840] env[68217]: DEBUG nova.compute.manager [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 899.505036] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961478, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.519505] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961479, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.035619} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.520877] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 899.521069] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/9f4edacb-625d-403d-beb9-916f1ffd1cd7/9f4edacb-625d-403d-beb9-916f1ffd1cd7.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 899.521325] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/9f4edacb-625d-403d-beb9-916f1ffd1cd7/9f4edacb-625d-403d-beb9-916f1ffd1cd7.vmdk to [datastore2] 650ebd16-da81-475e-a82a-7fa5fb2880bc/650ebd16-da81-475e-a82a-7fa5fb2880bc.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 899.521626] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e261a5f-0d1f-447b-b7d0-dc2670720a36 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.530748] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 899.530748] env[68217]: value = "task-2961480" [ 899.530748] env[68217]: _type = "Task" [ 899.530748] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.540782] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961480, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.673306] env[68217]: DEBUG nova.compute.manager [req-7f5b164c-5660-49fe-a9e2-bd1dc8148e07 req-2de2b46d-fd0a-46b5-bb2b-ee0e9c5cd86a service nova] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Received event network-vif-deleted-be6bc9f0-6147-4638-b306-5affbda64885 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 899.673306] env[68217]: INFO nova.compute.manager [req-7f5b164c-5660-49fe-a9e2-bd1dc8148e07 req-2de2b46d-fd0a-46b5-bb2b-ee0e9c5cd86a service nova] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Neutron deleted interface be6bc9f0-6147-4638-b306-5affbda64885; detaching it from the instance and deleting it from the info cache [ 899.673430] env[68217]: DEBUG nova.network.neutron [req-7f5b164c-5660-49fe-a9e2-bd1dc8148e07 req-2de2b46d-fd0a-46b5-bb2b-ee0e9c5cd86a service nova] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.709977] env[68217]: DEBUG nova.network.neutron [-] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.724573] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7cd6688-aa24-4c9f-a333-49c75b3712e6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.748393] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d69dcfa-9b5b-496e-aeab-12249eacab5a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.770784] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Updating instance 'fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb' progress to 67 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 899.885560] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.037s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 899.890678] env[68217]: DEBUG oslo_concurrency.lockutils [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.358s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 899.891086] env[68217]: DEBUG nova.objects.instance [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Lazy-loading 'resources' on Instance uuid e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2 {{(pid=68217) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 899.911269] env[68217]: INFO nova.scheduler.client.report [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Deleted allocations for instance b7fe971e-353f-427c-896c-32f9de0d70e7 [ 899.935264] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 900.003841] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961478, 'name': CreateVM_Task, 'duration_secs': 1.533189} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.004091] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 900.005032] env[68217]: DEBUG oslo_concurrency.lockutils [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.005313] env[68217]: DEBUG oslo_concurrency.lockutils [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.005751] env[68217]: DEBUG oslo_concurrency.lockutils [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 900.006622] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-81d2a011-55bf-49fc-81a6-fdd5ef89985c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.013939] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 900.013939] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52bcd187-b441-e75b-8222-545832f74f6a" [ 900.013939] env[68217]: _type = "Task" [ 900.013939] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.020549] env[68217]: DEBUG nova.network.neutron [-] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.026797] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52bcd187-b441-e75b-8222-545832f74f6a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.041949] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961480, 'name': CopyVirtualDisk_Task} progress is 12%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.176494] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-849a1804-af8e-4495-969b-79a7646cb2b3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.190547] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb1a956f-a4bc-49a2-8144-760bc506dd94 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.204842] env[68217]: DEBUG nova.network.neutron [None req-c216228c-a410-498e-902e-34101d8d2d7c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Updating instance_info_cache with network_info: [{"id": "686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0", "address": "fa:16:3e:2c:18:00", "network": {"id": "2456ed77-d69f-4430-b649-cebfb2e6e5c6", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-259900287-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "023b801c234d47d79cb57ea73058e81c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "e0c77754-4085-434b-a3e8-d61be099ac67", "external-id": "nsx-vlan-transportzone-822", "segmentation_id": 822, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap686a0657-d9", "ovs_interfaceid": "686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.217026] env[68217]: INFO nova.compute.manager [-] [instance: 03d61c68-1b37-4172-b276-67a73a0dc228] Took 1.28 seconds to deallocate network for instance. 
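The paired "Acquiring lock ... by ...", "Lock ... acquired by ... :: waited Ns" and '"released" by ... :: held Ns' records above (lockutils.py:405/410/424) are emitted by oslo.concurrency's synchronized wrapper around the compute manager's critical sections, while the lockutils.py:313/316/334 lines come from its lock() context manager. A minimal sketch of both forms, using a hypothetical lock name and function body rather than anything taken from this log:

    from oslo_concurrency import lockutils

    # Decorator form: the generated wrapper logs the acquire/waited/held
    # messages seen above each time the decorated function runs.
    @lockutils.synchronized('compute_resources')
    def update_usage(resource_tracker, instance):
        # hypothetical body; stands in for the real resource accounting
        pass

    # Context-manager form for ad-hoc critical sections, e.g. a per-instance
    # guard; logs the "Acquiring lock ..." / "Releasing lock ..." lines.
    def guarded_build(instance_uuid):
        with lockutils.lock(instance_uuid):
            pass  # hypothetical work guarded by the per-instance lock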
[ 900.238679] env[68217]: DEBUG nova.compute.manager [req-7f5b164c-5660-49fe-a9e2-bd1dc8148e07 req-2de2b46d-fd0a-46b5-bb2b-ee0e9c5cd86a service nova] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Detach interface failed, port_id=be6bc9f0-6147-4638-b306-5affbda64885, reason: Instance 95e625e9-a726-4c3c-be66-7b8ce93b5f8a could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 900.313892] env[68217]: DEBUG nova.network.neutron [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Port 747300c0-a758-483f-ba39-99efe6e731ec binding to destination host cpu-1 is already ACTIVE {{(pid=68217) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 900.426565] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1195a27-294f-4f30-af4c-b1e4e522730f tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "b7fe971e-353f-427c-896c-32f9de0d70e7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.645s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 900.528606] env[68217]: INFO nova.compute.manager [-] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Took 1.69 seconds to deallocate network for instance. [ 900.528991] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52bcd187-b441-e75b-8222-545832f74f6a, 'name': SearchDatastore_Task, 'duration_secs': 0.030343} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.531182] env[68217]: DEBUG oslo_concurrency.lockutils [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.531419] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 900.531644] env[68217]: DEBUG oslo_concurrency.lockutils [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.531786] env[68217]: DEBUG oslo_concurrency.lockutils [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 900.532039] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 900.534075] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be9f866e-c9f9-4f98-9af7-92530d762671 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.545196] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961480, 'name': CopyVirtualDisk_Task} progress is 32%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.551520] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 900.551719] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 900.552548] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f58f4739-6470-4d3c-ac73-5722aaf2b2fa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.559514] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 900.559514] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52747d8f-554d-df14-cd54-a5cd4bc5599a" [ 900.559514] env[68217]: _type = "Task" [ 900.559514] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.572882] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52747d8f-554d-df14-cd54-a5cd4bc5599a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.614084] env[68217]: DEBUG nova.network.neutron [-] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.710484] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c216228c-a410-498e-902e-34101d8d2d7c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Releasing lock "refresh_cache-58c15727-79ae-404f-a054-d71e3be498cc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 900.711313] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9666391-461f-4a4b-8f73-02d76db18278 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.722784] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c216228c-a410-498e-902e-34101d8d2d7c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Resuming the VM {{(pid=68217) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 900.722784] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-853de1ad-c8be-477a-a491-7fb2d2c7a5e2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.736248] env[68217]: DEBUG oslo_vmware.api [None req-c216228c-a410-498e-902e-34101d8d2d7c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 900.736248] env[68217]: value = "task-2961481" [ 900.736248] env[68217]: _type = "Task" [ 900.736248] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.759952] env[68217]: DEBUG oslo_vmware.api [None req-c216228c-a410-498e-902e-34101d8d2d7c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961481, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.886822] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1fc10d4-d96b-4510-9d8b-5626e1b02ecd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.896806] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4f94c71-4f29-4bc6-9987-f90625d45d32 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.933193] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1116f613-eb05-4d17-b819-08e52fc05b24 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.942745] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f9214a1-0d43-4929-b12d-71943acc5923 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.959653] env[68217]: DEBUG nova.compute.provider_tree [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 901.043253] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961480, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.072313] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52747d8f-554d-df14-cd54-a5cd4bc5599a, 'name': SearchDatastore_Task, 'duration_secs': 0.018966} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.073189] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54a01028-b539-4728-a9d5-3747f4130faa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.080199] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 901.080199] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5264a649-f13c-77a5-98a0-3d52a6d2e999" [ 901.080199] env[68217]: _type = "Task" [ 901.080199] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.089901] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5264a649-f13c-77a5-98a0-3d52a6d2e999, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.097371] env[68217]: INFO nova.compute.manager [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Took 0.57 seconds to detach 1 volumes for instance. [ 901.100021] env[68217]: DEBUG nova.compute.manager [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Deleting volume: 19dc7d9f-d50d-45f0-8776-4c28a20691ad {{(pid=68217) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 901.119064] env[68217]: INFO nova.compute.manager [-] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Took 1.79 seconds to deallocate network for instance. [ 901.257905] env[68217]: DEBUG oslo_vmware.api [None req-c216228c-a410-498e-902e-34101d8d2d7c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961481, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.258514] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5c83039-bb5e-493d-ad83-2851f0d457cc tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "03d61c68-1b37-4172-b276-67a73a0dc228" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.899s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.343649] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.343951] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.344130] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.448753] env[68217]: DEBUG nova.compute.manager [req-24fe7f42-ca10-4800-9698-47019d4b84a5 req-bd65dac8-c11f-4d14-920d-b88e88d3a4bc service nova] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Received event network-vif-deleted-49bc7718-8633-456d-b4d1-6bcc8493670b {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 901.462578] env[68217]: DEBUG nova.scheduler.client.report [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 901.546664] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961480, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.592158] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5264a649-f13c-77a5-98a0-3d52a6d2e999, 'name': SearchDatastore_Task, 'duration_secs': 0.016767} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.592642] env[68217]: DEBUG oslo_concurrency.lockutils [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 901.593049] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] aa4b9cc8-d0dc-4a0b-9eec-dceace695df9/aa4b9cc8-d0dc-4a0b-9eec-dceace695df9.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 901.593349] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8ef8355a-edc1-459f-b47a-cc99c437a7c3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.601374] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 901.601374] env[68217]: value = "task-2961483" [ 901.601374] env[68217]: _type = "Task" [ 901.601374] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.611314] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961483, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.626219] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.630973] env[68217]: DEBUG oslo_concurrency.lockutils [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquiring lock "23366029-e754-49dc-ba56-7a0d92232d81" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.631259] env[68217]: DEBUG oslo_concurrency.lockutils [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "23366029-e754-49dc-ba56-7a0d92232d81" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.631400] env[68217]: DEBUG oslo_concurrency.lockutils [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquiring lock "23366029-e754-49dc-ba56-7a0d92232d81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.631584] env[68217]: DEBUG oslo_concurrency.lockutils [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "23366029-e754-49dc-ba56-7a0d92232d81-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.631754] env[68217]: DEBUG oslo_concurrency.lockutils [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "23366029-e754-49dc-ba56-7a0d92232d81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.634194] env[68217]: INFO nova.compute.manager [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Terminating instance [ 901.649962] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 901.749879] 
env[68217]: DEBUG oslo_vmware.api [None req-c216228c-a410-498e-902e-34101d8d2d7c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961481, 'name': PowerOnVM_Task} progress is 93%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.968052] env[68217]: DEBUG oslo_concurrency.lockutils [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.077s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 901.970880] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.048s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 901.971192] env[68217]: DEBUG nova.objects.instance [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lazy-loading 'resources' on Instance uuid a7625a02-993b-4577-8d42-f763858a6154 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 901.999196] env[68217]: INFO nova.scheduler.client.report [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Deleted allocations for instance e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2 [ 902.052801] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961480, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.117712] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961483, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.139200] env[68217]: DEBUG nova.compute.manager [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 902.139487] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 902.140675] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-299f2108-dc1e-4c28-8a90-9e81f2ed0184 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.150887] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 902.151200] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e5e17b54-957a-4581-a2e0-3d7ddb3d4505 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.160996] env[68217]: DEBUG oslo_vmware.api [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 902.160996] env[68217]: value = "task-2961484" [ 902.160996] env[68217]: _type = "Task" [ 902.160996] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.172471] env[68217]: DEBUG oslo_vmware.api [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961484, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.249346] env[68217]: DEBUG oslo_vmware.api [None req-c216228c-a410-498e-902e-34101d8d2d7c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961481, 'name': PowerOnVM_Task, 'duration_secs': 1.325626} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.249636] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c216228c-a410-498e-902e-34101d8d2d7c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Resumed the VM {{(pid=68217) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 902.249824] env[68217]: DEBUG nova.compute.manager [None req-c216228c-a410-498e-902e-34101d8d2d7c tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 902.250637] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-800e94fe-93e4-42f3-b6e0-d064e97b85d9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.407189] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "refresh_cache-fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.408651] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquired lock "refresh_cache-fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 902.408651] env[68217]: DEBUG nova.network.neutron [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 902.410633] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "213b720b-b782-41c4-b60d-ef0af4b62932" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 902.411166] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "213b720b-b782-41c4-b60d-ef0af4b62932" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 902.507755] env[68217]: DEBUG oslo_concurrency.lockutils [None req-53c19722-3cb7-41e0-88ac-50f4e124c8d8 tempest-ServersV294TestFqdnHostnames-2103129663 tempest-ServersV294TestFqdnHostnames-2103129663-project-member] Lock "e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.760s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 902.555322] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961480, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.616809] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961483, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.604528} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.617555] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] aa4b9cc8-d0dc-4a0b-9eec-dceace695df9/aa4b9cc8-d0dc-4a0b-9eec-dceace695df9.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 902.617729] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 902.618136] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e0459c9-83e1-4cbf-be6d-c9d47caddc4d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.629752] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 902.629752] env[68217]: value = "task-2961485" [ 902.629752] env[68217]: _type = "Task" [ 902.629752] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.637536] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961485, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.671453] env[68217]: DEBUG oslo_vmware.api [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961484, 'name': PowerOffVM_Task, 'duration_secs': 0.320941} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.674832] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 902.674832] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 902.674929] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4118b704-4dc2-41ae-a155-848af6f21971 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.787291] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 902.787730] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 902.787952] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Deleting the datastore file [datastore1] 23366029-e754-49dc-ba56-7a0d92232d81 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 902.788711] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95fa77ba-c3f5-4bf4-a481-c99b11e742e8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.797091] env[68217]: DEBUG oslo_vmware.api [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for the task: (returnval){ [ 902.797091] env[68217]: value = "task-2961487" [ 902.797091] env[68217]: _type = "Task" [ 902.797091] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.809452] env[68217]: DEBUG oslo_vmware.api [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961487, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.913568] env[68217]: DEBUG nova.compute.manager [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 902.922272] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32f840b-92eb-4dde-84c0-3e0235d536fd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.934884] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-373cadf2-8267-4d5d-8b02-8288a3465693 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.970746] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1a7be78-3752-409b-96fc-e5403d93af26 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.980846] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d37fbe0f-cbb6-4344-9725-29d00d72e479 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.997434] env[68217]: DEBUG nova.compute.provider_tree [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 903.048126] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961480, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.027997} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.049060] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/9f4edacb-625d-403d-beb9-916f1ffd1cd7/9f4edacb-625d-403d-beb9-916f1ffd1cd7.vmdk to [datastore2] 650ebd16-da81-475e-a82a-7fa5fb2880bc/650ebd16-da81-475e-a82a-7fa5fb2880bc.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 903.051275] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99212321-2854-4f2b-9038-4c3c7f45cce6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.074731] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Reconfiguring VM instance instance-0000004e to attach disk [datastore2] 650ebd16-da81-475e-a82a-7fa5fb2880bc/650ebd16-da81-475e-a82a-7fa5fb2880bc.vmdk or device None with type streamOptimized {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 903.075429] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-96861e80-bd09-44de-b934-5f005679aacd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.095595] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 903.095595] env[68217]: value = "task-2961488" [ 903.095595] env[68217]: _type = "Task" [ 903.095595] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.108924] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961488, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.137203] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961485, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071402} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.137486] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 903.139588] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-623841d6-5a49-4399-801e-c8e1134b754e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.166768] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] aa4b9cc8-d0dc-4a0b-9eec-dceace695df9/aa4b9cc8-d0dc-4a0b-9eec-dceace695df9.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 903.167093] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-191ce6eb-c94b-424b-a2dd-21b3993bb9fb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.187831] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 903.187831] env[68217]: value = "task-2961489" [ 903.187831] env[68217]: _type = "Task" [ 903.187831] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.196641] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961489, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.293337] env[68217]: DEBUG nova.network.neutron [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Updating instance_info_cache with network_info: [{"id": "747300c0-a758-483f-ba39-99efe6e731ec", "address": "fa:16:3e:2a:04:4a", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap747300c0-a7", "ovs_interfaceid": "747300c0-a758-483f-ba39-99efe6e731ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.307319] env[68217]: DEBUG oslo_vmware.api [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Task: {'id': task-2961487, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16563} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.308056] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 903.308252] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 903.308430] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 903.308600] env[68217]: INFO nova.compute.manager [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Took 1.17 seconds to destroy the instance on the hypervisor. 
[ 903.308837] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 903.309036] env[68217]: DEBUG nova.compute.manager [-] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 903.309139] env[68217]: DEBUG nova.network.neutron [-] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 903.459430] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 903.500874] env[68217]: DEBUG nova.scheduler.client.report [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 903.610688] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961488, 'name': ReconfigVM_Task, 'duration_secs': 0.296966} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.611007] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Reconfigured VM instance instance-0000004e to attach disk [datastore2] 650ebd16-da81-475e-a82a-7fa5fb2880bc/650ebd16-da81-475e-a82a-7fa5fb2880bc.vmdk or device None with type streamOptimized {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 903.613087] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7d486a57-1994-4709-9793-b96d8f25595a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.627049] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 903.627049] env[68217]: value = "task-2961490" [ 903.627049] env[68217]: _type = "Task" [ 903.627049] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.647898] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961490, 'name': Rename_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.703177] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961489, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.736488] env[68217]: DEBUG nova.compute.manager [req-4e0683d9-c265-422f-acc3-94e288c88a40 req-1884a80b-7ef0-4d88-bc15-929d5fd50ae5 service nova] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Received event network-vif-deleted-96bf6c1f-33b5-4589-b488-c5be8d5892c6 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 903.736630] env[68217]: INFO nova.compute.manager [req-4e0683d9-c265-422f-acc3-94e288c88a40 req-1884a80b-7ef0-4d88-bc15-929d5fd50ae5 service nova] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Neutron deleted interface 96bf6c1f-33b5-4589-b488-c5be8d5892c6; detaching it from the instance and deleting it from the info cache [ 903.736794] env[68217]: DEBUG nova.network.neutron [req-4e0683d9-c265-422f-acc3-94e288c88a40 req-1884a80b-7ef0-4d88-bc15-929d5fd50ae5 service nova] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.797187] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Releasing lock "refresh_cache-fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 904.006987] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.036s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.010681] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.565s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 904.014072] env[68217]: INFO nova.compute.claims [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 904.036236] env[68217]: INFO nova.scheduler.client.report [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleted allocations for instance a7625a02-993b-4577-8d42-f763858a6154 [ 904.140029] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961490, 'name': Rename_Task, 'duration_secs': 0.191372} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.140029] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 904.140029] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-08cedf3f-b6f4-43d2-b29a-d5dbb0f11fc3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.145512] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 904.145512] env[68217]: value = "task-2961491" [ 904.145512] env[68217]: _type = "Task" [ 904.145512] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.154744] env[68217]: DEBUG nova.network.neutron [-] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.155952] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961491, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.200254] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961489, 'name': ReconfigVM_Task, 'duration_secs': 0.807701} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.200543] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Reconfigured VM instance instance-0000003f to attach disk [datastore1] aa4b9cc8-d0dc-4a0b-9eec-dceace695df9/aa4b9cc8-d0dc-4a0b-9eec-dceace695df9.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 904.201192] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5b9f3d3b-3c6f-46b7-a848-2e39bba9e484 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.210019] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 904.210019] env[68217]: value = "task-2961492" [ 904.210019] env[68217]: _type = "Task" [ 904.210019] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.221435] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961492, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.240595] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a30cea7a-c58e-492f-b655-54297a890130 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.259017] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3617a046-5274-4835-b689-7d1fa826b478 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.302879] env[68217]: DEBUG nova.compute.manager [req-4e0683d9-c265-422f-acc3-94e288c88a40 req-1884a80b-7ef0-4d88-bc15-929d5fd50ae5 service nova] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Detach interface failed, port_id=96bf6c1f-33b5-4589-b488-c5be8d5892c6, reason: Instance 23366029-e754-49dc-ba56-7a0d92232d81 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 904.326418] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7296aaf6-b528-4c44-a2fa-92a18f2f3055 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.354027] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a0af0a-4393-4996-b91b-184d492a62e6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.363110] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Updating instance 'fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb' progress to 83 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 904.547105] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4b88f8fb-dc0e-467e-abf7-98c675de4917 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "a7625a02-993b-4577-8d42-f763858a6154" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.168s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 904.658067] env[68217]: INFO nova.compute.manager [-] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Took 1.35 seconds to deallocate network for instance. [ 904.659232] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961491, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.722362] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961492, 'name': Rename_Task, 'duration_secs': 0.169605} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.724805] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 904.724805] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e8215564-a101-4006-9666-2e313fc34e0d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.731764] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 904.731764] env[68217]: value = "task-2961493" [ 904.731764] env[68217]: _type = "Task" [ 904.731764] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.742684] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961493, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.869262] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 904.869711] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f9c8862-cc99-43b4-a8c1-c65b4ae3de91 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.879608] env[68217]: DEBUG oslo_vmware.api [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 904.879608] env[68217]: value = "task-2961494" [ 904.879608] env[68217]: _type = "Task" [ 904.879608] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.889245] env[68217]: DEBUG oslo_vmware.api [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961494, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.158019] env[68217]: DEBUG oslo_vmware.api [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961491, 'name': PowerOnVM_Task, 'duration_secs': 0.557272} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.158019] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 905.158019] env[68217]: INFO nova.compute.manager [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Took 17.00 seconds to spawn the instance on the hypervisor. [ 905.158019] env[68217]: DEBUG nova.compute.manager [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 905.158854] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8910179-8533-41bc-bbf8-e50efcba4464 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.167399] env[68217]: DEBUG oslo_concurrency.lockutils [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 905.253339] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961493, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.391568] env[68217]: DEBUG oslo_vmware.api [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961494, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.539439] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc660727-99c3-4db9-b290-3d7680f4423c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.550606] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4c86553-5abd-4b7a-b6c9-bb76f014e9fb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.590786] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-219fa803-c1a9-4fc4-b2cd-35d91919c420 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.601151] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8d1749-3c74-4761-80a4-d80b9bebc2ba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.616857] env[68217]: DEBUG nova.compute.provider_tree [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.683592] env[68217]: INFO nova.compute.manager [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Took 34.72 seconds to build instance. [ 905.743161] env[68217]: DEBUG oslo_vmware.api [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961493, 'name': PowerOnVM_Task, 'duration_secs': 0.754105} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.743501] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 905.743712] env[68217]: DEBUG nova.compute.manager [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 905.744585] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c620303-32af-4368-875f-a831871484cc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.892703] env[68217]: DEBUG oslo_vmware.api [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961494, 'name': PowerOnVM_Task, 'duration_secs': 0.801631} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.892978] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 905.893180] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8dc5aed6-8e17-4a3c-8a63-01708cf2ce7a tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Updating instance 'fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb' progress to 100 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 906.120781] env[68217]: DEBUG nova.scheduler.client.report [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 906.173705] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "650ebd16-da81-475e-a82a-7fa5fb2880bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.188290] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85cd619e-ef29-4748-8d44-b67b3e3a36a2 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "650ebd16-da81-475e-a82a-7fa5fb2880bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.237s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.188690] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "650ebd16-da81-475e-a82a-7fa5fb2880bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.015s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.188734] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "650ebd16-da81-475e-a82a-7fa5fb2880bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.188938] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 
tempest-ImagesTestJSON-1122737556-project-member] Lock "650ebd16-da81-475e-a82a-7fa5fb2880bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.189124] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "650ebd16-da81-475e-a82a-7fa5fb2880bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.191805] env[68217]: INFO nova.compute.manager [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Terminating instance [ 906.264482] env[68217]: DEBUG oslo_concurrency.lockutils [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.627218] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.617s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 906.628912] env[68217]: DEBUG nova.compute.manager [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 906.635359] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.936s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 906.699951] env[68217]: DEBUG nova.compute.manager [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 906.700398] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 906.701724] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d12c0e6-5561-4d15-9d18-fc09d4f19ec7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.717272] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 906.717805] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1b2701b2-1ee0-41fd-aec8-e0a0bfed97e1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.733710] env[68217]: DEBUG oslo_vmware.api [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 906.733710] env[68217]: value = "task-2961495" [ 906.733710] env[68217]: _type = "Task" [ 906.733710] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.746481] env[68217]: DEBUG oslo_vmware.api [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961495, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.754936] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "fab7d1eb-ef05-4498-aa6d-a524c3bb59c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 906.757024] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "fab7d1eb-ef05-4498-aa6d-a524c3bb59c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 907.098643] env[68217]: INFO nova.compute.manager [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Rebuilding instance [ 907.141455] env[68217]: DEBUG nova.compute.utils [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 907.143126] env[68217]: DEBUG nova.compute.manager [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 907.143318] env[68217]: DEBUG nova.network.neutron [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 907.147850] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62a2d4b1-1d61-474c-8cb6-93fc4da9c8d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.161123] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-383540da-6cd1-46e7-a937-d5d3a3b2f543 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.165156] env[68217]: DEBUG nova.compute.manager [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 907.166117] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7feebf21-7001-4e3c-af0c-1c2d752b957c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.211908] env[68217]: DEBUG nova.policy [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '421e01339dc845ccb7341fd3b5c90ebb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '87c7e14d2fe94e58bb3df92a8841486b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 907.214152] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ad2069e-5c08-43eb-9e24-3d63f776b0c9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.224998] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7096b2d-6359-41b8-ae47-6424d2907c37 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.241565] env[68217]: DEBUG nova.compute.provider_tree [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 907.253267] env[68217]: DEBUG oslo_vmware.api [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961495, 'name': PowerOffVM_Task, 'duration_secs': 0.312872} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.254234] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 907.254425] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 907.254696] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9569892a-c863-4a4b-8795-c25cba36a356 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.257378] env[68217]: DEBUG nova.compute.manager [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 907.447556] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 907.447805] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 907.447805] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Deleting the datastore file [datastore2] 650ebd16-da81-475e-a82a-7fa5fb2880bc {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 907.448634] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-475e9ee6-19e8-40f6-b2be-c6bf8da716c6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.462822] env[68217]: DEBUG oslo_vmware.api [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 907.462822] env[68217]: value = "task-2961497" [ 907.462822] env[68217]: _type = "Task" [ 907.462822] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.475284] env[68217]: DEBUG oslo_vmware.api [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961497, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.607244] env[68217]: DEBUG nova.network.neutron [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Successfully created port: 1f77eb32-6eb4-42c4-8065-a7247f2c0c4a {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 907.646717] env[68217]: DEBUG nova.compute.manager [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 907.751032] env[68217]: DEBUG nova.scheduler.client.report [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 907.784324] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 907.975202] env[68217]: DEBUG oslo_vmware.api [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961497, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.289558} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.975202] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 907.975202] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 907.975202] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 907.975653] env[68217]: INFO nova.compute.manager [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Took 1.27 seconds to destroy the instance on the hypervisor. [ 907.976093] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 907.976408] env[68217]: DEBUG nova.compute.manager [-] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 907.976621] env[68217]: DEBUG nova.network.neutron [-] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 908.215273] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 908.215622] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e8dc67de-0668-42e7-a684-e45e431499e3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.224506] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 908.224506] env[68217]: value = "task-2961498" [ 908.224506] env[68217]: _type = "Task" [ 908.224506] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.237856] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961498, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.255014] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.621s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.255800] env[68217]: INFO nova.compute.manager [None req-3c429704-f1f7-46a5-b892-3e4d4ba5136f tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Successfully reverted task state from rebuilding on failure for instance. [ 908.265194] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.956s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.265194] env[68217]: INFO nova.compute.claims [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 908.455495] env[68217]: DEBUG nova.compute.manager [req-5abe6ece-9acf-4dee-9ea2-366c19b24898 req-c0bb7aee-abc5-4b1e-b8d0-b74078b32738 service nova] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Received event network-vif-deleted-a2616d12-5ede-48a3-8191-248418505394 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 908.456069] env[68217]: INFO nova.compute.manager [req-5abe6ece-9acf-4dee-9ea2-366c19b24898 req-c0bb7aee-abc5-4b1e-b8d0-b74078b32738 service nova] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Neutron deleted interface a2616d12-5ede-48a3-8191-248418505394; detaching it from the instance and deleting it from the info cache [ 908.456298] env[68217]: DEBUG nova.network.neutron [req-5abe6ece-9acf-4dee-9ea2-366c19b24898 req-c0bb7aee-abc5-4b1e-b8d0-b74078b32738 service nova] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.667671] env[68217]: DEBUG nova.compute.manager [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 908.700664] env[68217]: DEBUG nova.virt.hardware [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 908.700953] env[68217]: DEBUG nova.virt.hardware [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 908.701176] env[68217]: DEBUG nova.virt.hardware [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 908.701415] env[68217]: DEBUG nova.virt.hardware [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 908.701595] env[68217]: DEBUG nova.virt.hardware [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 908.701754] env[68217]: DEBUG nova.virt.hardware [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 908.701962] env[68217]: DEBUG nova.virt.hardware [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 908.702190] env[68217]: DEBUG nova.virt.hardware [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 908.702541] 
env[68217]: DEBUG nova.virt.hardware [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 908.702541] env[68217]: DEBUG nova.virt.hardware [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 908.702657] env[68217]: DEBUG nova.virt.hardware [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 908.703615] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc3c635-7518-40c6-bcc7-2973079b83fb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.713158] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5106f7d5-00ef-47d8-b289-e0e9c72d39dd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.739315] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961498, 'name': PowerOffVM_Task, 'duration_secs': 0.302277} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.739594] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 908.739821] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 908.740591] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7906f27-947e-4087-b708-2be2f8b32422 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.747950] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 908.750564] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-07460608-6cad-4084-a1a0-8eb8f57776e5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.805461] env[68217]: DEBUG oslo_concurrency.lockutils [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "58c15727-79ae-404f-a054-d71e3be498cc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.805731] env[68217]: DEBUG oslo_concurrency.lockutils [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "58c15727-79ae-404f-a054-d71e3be498cc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.805970] env[68217]: DEBUG oslo_concurrency.lockutils [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "58c15727-79ae-404f-a054-d71e3be498cc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 908.806193] env[68217]: DEBUG oslo_concurrency.lockutils [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "58c15727-79ae-404f-a054-d71e3be498cc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 908.806467] 
env[68217]: DEBUG oslo_concurrency.lockutils [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "58c15727-79ae-404f-a054-d71e3be498cc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 908.808988] env[68217]: INFO nova.compute.manager [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Terminating instance [ 908.814488] env[68217]: DEBUG nova.network.neutron [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Port 747300c0-a758-483f-ba39-99efe6e731ec binding to destination host cpu-1 is already ACTIVE {{(pid=68217) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 908.814714] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "refresh_cache-fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.814868] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquired lock "refresh_cache-fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 908.815037] env[68217]: DEBUG nova.network.neutron [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 908.827684] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 908.827788] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 908.827991] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Deleting the datastore file [datastore1] aa4b9cc8-d0dc-4a0b-9eec-dceace695df9 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 908.828273] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-08bcdfc7-cd5a-4747-84c9-a1df04516520 {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.836918] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 908.836918] env[68217]: value = "task-2961500" [ 908.836918] env[68217]: _type = "Task" [ 908.836918] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.841621] env[68217]: DEBUG nova.network.neutron [-] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.848423] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961500, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.959395] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fdfcb7eb-c078-4846-8853-68378cb9b76c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.970992] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63ef6ada-09f2-4a96-bccf-073404724649 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.018026] env[68217]: DEBUG nova.compute.manager [req-5abe6ece-9acf-4dee-9ea2-366c19b24898 req-c0bb7aee-abc5-4b1e-b8d0-b74078b32738 service nova] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Detach interface failed, port_id=a2616d12-5ede-48a3-8191-248418505394, reason: Instance 650ebd16-da81-475e-a82a-7fa5fb2880bc could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 909.318042] env[68217]: DEBUG nova.compute.manager [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 909.318042] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 909.320423] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08f1300c-157b-418b-9da2-f24ba86734d8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.330902] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 909.331168] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6134cdab-df04-46de-a926-16e9a8868b84 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.340607] env[68217]: DEBUG oslo_vmware.api [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 909.340607] env[68217]: value = "task-2961501" [ 909.340607] env[68217]: _type = "Task" [ 909.340607] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.351428] env[68217]: INFO nova.compute.manager [-] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Took 1.37 seconds to deallocate network for instance. [ 909.351428] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961500, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.140843} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.353662] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 909.353993] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 909.354329] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 909.361713] env[68217]: DEBUG oslo_vmware.api [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961501, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.458492] env[68217]: DEBUG nova.network.neutron [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Successfully updated port: 1f77eb32-6eb4-42c4-8065-a7247f2c0c4a {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 909.589119] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "04149a5c-d1b5-4d71-a1ca-44696506a40d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.589119] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "04149a5c-d1b5-4d71-a1ca-44696506a40d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 909.644449] env[68217]: DEBUG nova.network.neutron [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Updating instance_info_cache with network_info: [{"id": "747300c0-a758-483f-ba39-99efe6e731ec", "address": "fa:16:3e:2a:04:4a", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], 
"routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap747300c0-a7", "ovs_interfaceid": "747300c0-a758-483f-ba39-99efe6e731ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.715534] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb39281-1118-4d72-8834-fa9e72dafc31 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.724100] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51114e60-6608-47c4-84a3-558f0b64d0a9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.755149] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0e71d13-55bf-4a82-8108-f6bc35c40f80 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.763156] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20db2678-a096-4445-b3bf-52b11debca86 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.778629] env[68217]: DEBUG nova.compute.provider_tree [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 909.851485] env[68217]: DEBUG oslo_vmware.api [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961501, 'name': PowerOffVM_Task, 'duration_secs': 0.381511} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.851998] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 909.852192] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 909.852468] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-35981e7b-bf40-4d1f-8a04-cde578bfd256 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.863086] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 909.929292] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 909.929527] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 909.929707] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Deleting the datastore file [datastore2] 58c15727-79ae-404f-a054-d71e3be498cc {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 909.930037] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d69b6ef3-709c-4de1-80d5-66b26d236acd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.939473] env[68217]: DEBUG oslo_vmware.api [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for the task: (returnval){ [ 909.939473] env[68217]: value = "task-2961503" [ 909.939473] env[68217]: _type = "Task" [ 909.939473] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.947871] env[68217]: DEBUG oslo_vmware.api [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961503, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.964754] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "refresh_cache-7371d4d3-e255-4a1f-8d5f-2ee1297e89d1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.964878] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquired lock "refresh_cache-7371d4d3-e255-4a1f-8d5f-2ee1297e89d1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 909.965032] env[68217]: DEBUG nova.network.neutron [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 910.094966] env[68217]: DEBUG nova.compute.manager [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 910.147227] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Releasing lock "refresh_cache-fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 910.281758] env[68217]: DEBUG nova.scheduler.client.report [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 910.392792] env[68217]: DEBUG nova.virt.hardware [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 910.393440] env[68217]: DEBUG nova.virt.hardware [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 910.393440] env[68217]: DEBUG nova.virt.hardware [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 910.393440] env[68217]: DEBUG nova.virt.hardware [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 910.393584] env[68217]: DEBUG nova.virt.hardware [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 910.393815] env[68217]: DEBUG nova.virt.hardware [None 
req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 910.394098] env[68217]: DEBUG nova.virt.hardware [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 910.394340] env[68217]: DEBUG nova.virt.hardware [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 910.394525] env[68217]: DEBUG nova.virt.hardware [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 910.394691] env[68217]: DEBUG nova.virt.hardware [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 910.394862] env[68217]: DEBUG nova.virt.hardware [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 910.395743] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f893308b-3d0b-471a-b336-e704a7bdbf2e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.404545] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46129901-c681-4584-a5d0-9905eb451ccd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.419013] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:9d:be', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1323cb03-8367-485a-962e-131af8eba474', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '04299da0-d810-4014-b79f-1ac8a45e1a8f', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 910.426528] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 910.426804] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 910.427034] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f031263a-f836-4127-acb8-125b35678be4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.450106] env[68217]: DEBUG oslo_vmware.api [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Task: {'id': task-2961503, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131409} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.451371] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 910.451564] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 910.451733] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 910.451902] env[68217]: INFO nova.compute.manager [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Took 1.13 seconds to destroy the instance on the hypervisor. [ 910.452170] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 910.452360] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 910.452360] env[68217]: value = "task-2961504" [ 910.452360] env[68217]: _type = "Task" [ 910.452360] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.452538] env[68217]: DEBUG nova.compute.manager [-] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 910.452634] env[68217]: DEBUG nova.network.neutron [-] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 910.462084] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961504, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.486440] env[68217]: DEBUG nova.compute.manager [req-35fcd287-6bcf-4b94-aa43-8618e104f383 req-1e233c47-bb18-4a97-abc7-ced8f99d50a9 service nova] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Received event network-vif-plugged-1f77eb32-6eb4-42c4-8065-a7247f2c0c4a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 910.486714] env[68217]: DEBUG oslo_concurrency.lockutils [req-35fcd287-6bcf-4b94-aa43-8618e104f383 req-1e233c47-bb18-4a97-abc7-ced8f99d50a9 service nova] Acquiring lock "7371d4d3-e255-4a1f-8d5f-2ee1297e89d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.486953] env[68217]: DEBUG oslo_concurrency.lockutils [req-35fcd287-6bcf-4b94-aa43-8618e104f383 req-1e233c47-bb18-4a97-abc7-ced8f99d50a9 service nova] Lock "7371d4d3-e255-4a1f-8d5f-2ee1297e89d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.487185] env[68217]: DEBUG oslo_concurrency.lockutils [req-35fcd287-6bcf-4b94-aa43-8618e104f383 req-1e233c47-bb18-4a97-abc7-ced8f99d50a9 service nova] Lock "7371d4d3-e255-4a1f-8d5f-2ee1297e89d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.487355] env[68217]: DEBUG nova.compute.manager [req-35fcd287-6bcf-4b94-aa43-8618e104f383 req-1e233c47-bb18-4a97-abc7-ced8f99d50a9 service nova] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] No waiting events found dispatching network-vif-plugged-1f77eb32-6eb4-42c4-8065-a7247f2c0c4a {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 910.487518] env[68217]: WARNING nova.compute.manager [req-35fcd287-6bcf-4b94-aa43-8618e104f383 req-1e233c47-bb18-4a97-abc7-ced8f99d50a9 service nova] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Received unexpected event network-vif-plugged-1f77eb32-6eb4-42c4-8065-a7247f2c0c4a for instance with vm_state building and task_state spawning. 
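The PowerOffVM_Task, DeleteDatastoreFile_Task and CreateVM_Task entries above all follow the same oslo.vmware pattern: the vmwareapi driver invokes a vSphere task method on its session and then blocks in wait_for_task while _poll_task reports progress, which is what produces the repeated "Waiting for the task", "progress is 0%." and "completed successfully" lines. A minimal sketch of that pattern, with an assumed helper name and arguments rather than Nova's actual code:

# Illustrative sketch only (not Nova's helper) of the invoke-then-poll pattern
# visible in this log. `session` is an oslo_vmware.api.VMwareAPISession such as
# the one created at service startup; `vm_ref` is a VirtualMachine managed-object
# reference that a real caller would first obtain (e.g. via the SearchIndex /
# PropertyCollector calls also logged in this section).
from oslo_vmware import api as vmware_api


def power_off_vm(session: vmware_api.VMwareAPISession, vm_ref) -> None:
    # Invokes VirtualMachine.PowerOffVM_Task through the session ...
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    # ... then waits while oslo.vmware polls the task to completion, emitting
    # the "Task: {...} progress is 0%." entries until it succeeds or raises.
    session.wait_for_task(task)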
[ 910.487684] env[68217]: DEBUG nova.compute.manager [req-35fcd287-6bcf-4b94-aa43-8618e104f383 req-1e233c47-bb18-4a97-abc7-ced8f99d50a9 service nova] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Received event network-changed-1f77eb32-6eb4-42c4-8065-a7247f2c0c4a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 910.487816] env[68217]: DEBUG nova.compute.manager [req-35fcd287-6bcf-4b94-aa43-8618e104f383 req-1e233c47-bb18-4a97-abc7-ced8f99d50a9 service nova] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Refreshing instance network info cache due to event network-changed-1f77eb32-6eb4-42c4-8065-a7247f2c0c4a. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 910.487983] env[68217]: DEBUG oslo_concurrency.lockutils [req-35fcd287-6bcf-4b94-aa43-8618e104f383 req-1e233c47-bb18-4a97-abc7-ced8f99d50a9 service nova] Acquiring lock "refresh_cache-7371d4d3-e255-4a1f-8d5f-2ee1297e89d1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.613691] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.650483] env[68217]: DEBUG nova.compute.manager [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68217) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 910.650749] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 910.785173] env[68217]: DEBUG nova.network.neutron [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 910.790954] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.528s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 910.791462] env[68217]: DEBUG nova.compute.manager [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 910.794313] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.859s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 910.799455] env[68217]: INFO nova.compute.claims [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 910.965937] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961504, 'name': CreateVM_Task, 'duration_secs': 0.331546} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.966224] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 910.966862] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.968286] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 910.968535] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 910.968807] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e18aec57-2abe-430b-9579-78051f1ac41b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.974719] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 910.974719] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52155dc8-f82e-e4e5-60fa-963ccd972277" [ 910.974719] env[68217]: _type = "Task" [ 910.974719] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.984726] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52155dc8-f82e-e4e5-60fa-963ccd972277, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.162453] env[68217]: DEBUG nova.network.neutron [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Updating instance_info_cache with network_info: [{"id": "1f77eb32-6eb4-42c4-8065-a7247f2c0c4a", "address": "fa:16:3e:05:bf:e5", "network": {"id": "4e1058e7-f813-46ae-9371-9bc53e8c24cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1123881374-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c7e14d2fe94e58bb3df92a8841486b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f408ce42-3cac-4d9d-9c05-15471d653a18", "external-id": "nsx-vlan-transportzone-265", "segmentation_id": 265, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f77eb32-6e", "ovs_interfaceid": "1f77eb32-6eb4-42c4-8065-a7247f2c0c4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.304131] env[68217]: DEBUG nova.compute.utils [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 911.307289] env[68217]: DEBUG nova.compute.manager [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 911.307419] env[68217]: DEBUG nova.network.neutron [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 911.345031] env[68217]: DEBUG nova.policy [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '752b6d9ab4d64b1390ca8388fb28db15', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad331ad8f44348f6b4c0a6c56977022d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 911.485685] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52155dc8-f82e-e4e5-60fa-963ccd972277, 'name': SearchDatastore_Task, 'duration_secs': 0.011189} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.486021] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.487017] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 911.487017] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.487017] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.487017] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Creating directory with path 
[datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 911.487216] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-077828f0-7be6-405c-a22f-3fd83bc42ddf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.496716] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 911.496891] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 911.497610] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-770007de-d3d1-4c5a-bf40-321d71f07649 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.503591] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 911.503591] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e4ff7f-49ac-0df3-c804-f6bf462f63ee" [ 911.503591] env[68217]: _type = "Task" [ 911.503591] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.512577] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e4ff7f-49ac-0df3-c804-f6bf462f63ee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.641615] env[68217]: DEBUG nova.network.neutron [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Successfully created port: 63b92bd9-8f95-481e-9ef4-468ea20dade1 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 911.665565] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Releasing lock "refresh_cache-7371d4d3-e255-4a1f-8d5f-2ee1297e89d1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 911.665916] env[68217]: DEBUG nova.compute.manager [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Instance network_info: |[{"id": "1f77eb32-6eb4-42c4-8065-a7247f2c0c4a", "address": "fa:16:3e:05:bf:e5", "network": {"id": "4e1058e7-f813-46ae-9371-9bc53e8c24cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1123881374-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c7e14d2fe94e58bb3df92a8841486b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f408ce42-3cac-4d9d-9c05-15471d653a18", "external-id": "nsx-vlan-transportzone-265", "segmentation_id": 265, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f77eb32-6e", "ovs_interfaceid": "1f77eb32-6eb4-42c4-8065-a7247f2c0c4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 911.666251] env[68217]: DEBUG oslo_concurrency.lockutils [req-35fcd287-6bcf-4b94-aa43-8618e104f383 req-1e233c47-bb18-4a97-abc7-ced8f99d50a9 service nova] Acquired lock "refresh_cache-7371d4d3-e255-4a1f-8d5f-2ee1297e89d1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 911.666430] env[68217]: DEBUG nova.network.neutron [req-35fcd287-6bcf-4b94-aa43-8618e104f383 req-1e233c47-bb18-4a97-abc7-ced8f99d50a9 service nova] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Refreshing network info cache for port 1f77eb32-6eb4-42c4-8065-a7247f2c0c4a {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 911.667676] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:05:bf:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f408ce42-3cac-4d9d-9c05-15471d653a18', 
'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1f77eb32-6eb4-42c4-8065-a7247f2c0c4a', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 911.677102] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 911.677601] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 911.677866] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6a3a3ff3-cf8f-4c16-a530-10d9a15b2038 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.699342] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 911.699342] env[68217]: value = "task-2961505" [ 911.699342] env[68217]: _type = "Task" [ 911.699342] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.707984] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961505, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.746300] env[68217]: DEBUG nova.network.neutron [-] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.807991] env[68217]: DEBUG nova.compute.manager [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 912.017659] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e4ff7f-49ac-0df3-c804-f6bf462f63ee, 'name': SearchDatastore_Task, 'duration_secs': 0.01348} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.021326] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9fa7540-562f-4e36-9a88-4e03bc084607 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.029050] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 912.029050] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52739ae4-86d0-fa82-b007-97358bd2d548" [ 912.029050] env[68217]: _type = "Task" [ 912.029050] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.041447] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52739ae4-86d0-fa82-b007-97358bd2d548, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.207808] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85c1338f-f34c-4c38-acf6-bdaf6ac7fd65 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.214726] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961505, 'name': CreateVM_Task, 'duration_secs': 0.33693} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.215245] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 912.215935] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.216122] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.216431] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 912.216691] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-973eefb9-0ad8-4e60-8ac7-484e675a0057 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.221421] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-284cc804-de52-4357-bd63-e0480011e234 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.225784] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 912.225784] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]528e53f1-85b6-fe35-2066-6dc9c0d4e835" [ 912.225784] env[68217]: _type = "Task" [ 912.225784] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.254809] env[68217]: INFO nova.compute.manager [-] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Took 1.80 seconds to deallocate network for instance. [ 912.257743] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d921ed61-4560-4a93-9960-caa614527c8e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.266419] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528e53f1-85b6-fe35-2066-6dc9c0d4e835, 'name': SearchDatastore_Task, 'duration_secs': 0.01002} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.267202] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.267432] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 912.267662] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.267803] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 912.268195] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 912.268639] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ce1a7849-7b1a-4fb8-9ee0-65bfad8543f7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.274080] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65aa46d2-c763-4933-8aa6-70961c000a66 {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.279182] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 912.279389] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 912.287282] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-064f65c0-d6e1-4556-91d6-63f1c54611d5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.289613] env[68217]: DEBUG nova.compute.provider_tree [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.295273] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 912.295273] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5277ee21-be5c-1851-cf93-e58a7eceec51" [ 912.295273] env[68217]: _type = "Task" [ 912.295273] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.303209] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5277ee21-be5c-1851-cf93-e58a7eceec51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.481818] env[68217]: DEBUG nova.network.neutron [req-35fcd287-6bcf-4b94-aa43-8618e104f383 req-1e233c47-bb18-4a97-abc7-ced8f99d50a9 service nova] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Updated VIF entry in instance network info cache for port 1f77eb32-6eb4-42c4-8065-a7247f2c0c4a. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 912.482196] env[68217]: DEBUG nova.network.neutron [req-35fcd287-6bcf-4b94-aa43-8618e104f383 req-1e233c47-bb18-4a97-abc7-ced8f99d50a9 service nova] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Updating instance_info_cache with network_info: [{"id": "1f77eb32-6eb4-42c4-8065-a7247f2c0c4a", "address": "fa:16:3e:05:bf:e5", "network": {"id": "4e1058e7-f813-46ae-9371-9bc53e8c24cd", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1123881374-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "87c7e14d2fe94e58bb3df92a8841486b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f408ce42-3cac-4d9d-9c05-15471d653a18", "external-id": "nsx-vlan-transportzone-265", "segmentation_id": 265, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1f77eb32-6e", "ovs_interfaceid": "1f77eb32-6eb4-42c4-8065-a7247f2c0c4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.539477] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52739ae4-86d0-fa82-b007-97358bd2d548, 'name': SearchDatastore_Task, 'duration_secs': 0.011102} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.539743] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 912.540009] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] aa4b9cc8-d0dc-4a0b-9eec-dceace695df9/aa4b9cc8-d0dc-4a0b-9eec-dceace695df9.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 912.540291] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8bb61cd0-f76b-4541-a7b3-b4cd1bf73657 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.547933] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 912.547933] env[68217]: value = "task-2961506" [ 912.547933] env[68217]: _type = "Task" [ 912.547933] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.554943] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961506, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.758653] env[68217]: DEBUG nova.compute.manager [req-80d20263-c53f-4bbb-8c4b-b252fc4ec6e4 req-81dd0178-7956-426a-9dd5-0d830540e93d service nova] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Received event network-vif-deleted-686a0657-d9b3-47e8-bc1f-12aaf8f0a9c0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 912.770217] env[68217]: DEBUG oslo_concurrency.lockutils [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 912.793452] env[68217]: DEBUG nova.scheduler.client.report [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 912.808147] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5277ee21-be5c-1851-cf93-e58a7eceec51, 'name': SearchDatastore_Task, 'duration_secs': 0.009312} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.809089] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74c541f0-881d-4a4e-a276-851b7da0b295 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.816085] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 912.816085] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529850ff-4d8b-1729-e994-31f447b5a065" [ 912.816085] env[68217]: _type = "Task" [ 912.816085] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.820675] env[68217]: DEBUG nova.compute.manager [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 912.830856] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529850ff-4d8b-1729-e994-31f447b5a065, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.854644] env[68217]: DEBUG nova.virt.hardware [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 912.855369] env[68217]: DEBUG nova.virt.hardware [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 912.855470] env[68217]: DEBUG nova.virt.hardware [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 912.855737] env[68217]: DEBUG nova.virt.hardware [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 912.855934] env[68217]: DEBUG nova.virt.hardware [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 912.856223] env[68217]: DEBUG nova.virt.hardware [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 912.856515] env[68217]: DEBUG nova.virt.hardware [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 912.856794] env[68217]: DEBUG nova.virt.hardware [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 912.857150] env[68217]: DEBUG nova.virt.hardware [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 912.857459] env[68217]: DEBUG nova.virt.hardware [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 912.857708] env[68217]: DEBUG nova.virt.hardware [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 912.858952] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c6194e-c81f-4507-bd9e-97fe300601f7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.871787] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf20240-aec3-4db6-a759-06d6a23c173d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.985471] env[68217]: DEBUG oslo_concurrency.lockutils [req-35fcd287-6bcf-4b94-aa43-8618e104f383 req-1e233c47-bb18-4a97-abc7-ced8f99d50a9 service nova] Releasing lock "refresh_cache-7371d4d3-e255-4a1f-8d5f-2ee1297e89d1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.060689] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961506, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499528} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.061038] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] aa4b9cc8-d0dc-4a0b-9eec-dceace695df9/aa4b9cc8-d0dc-4a0b-9eec-dceace695df9.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 913.061337] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 913.061666] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0fb2d27c-7376-4043-9c53-c932699f4949 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.071147] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 913.071147] env[68217]: value = "task-2961507" [ 913.071147] env[68217]: _type = "Task" [ 913.071147] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.081829] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961507, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.247405] env[68217]: DEBUG nova.network.neutron [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Successfully updated port: 63b92bd9-8f95-481e-9ef4-468ea20dade1 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 913.301766] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.507s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.302125] env[68217]: DEBUG nova.compute.manager [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 913.304746] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.679s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.304827] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.307599] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 11.657s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 913.307599] env[68217]: DEBUG nova.objects.instance [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Lazy-loading 'resources' on Instance uuid 95e625e9-a726-4c3c-be66-7b8ce93b5f8a {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 913.328541] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529850ff-4d8b-1729-e994-31f447b5a065, 'name': SearchDatastore_Task, 'duration_secs': 0.019815} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.328832] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 913.328902] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1/7371d4d3-e255-4a1f-8d5f-2ee1297e89d1.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 913.329991] env[68217]: INFO nova.scheduler.client.report [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleted allocations for instance 2612f6fc-a43f-4011-8a09-51088a49371a [ 913.330976] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-05854497-54cd-4d34-a6a4-894f5c472d7e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.343951] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 913.343951] env[68217]: value = "task-2961508" [ 913.343951] env[68217]: _type = "Task" [ 913.343951] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.353550] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961508, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.582874] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961507, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076259} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.583178] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 913.584129] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4aad947-b011-4713-aafd-9bc4062b451f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.609558] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Reconfiguring VM instance instance-0000003f to attach disk [datastore1] aa4b9cc8-d0dc-4a0b-9eec-dceace695df9/aa4b9cc8-d0dc-4a0b-9eec-dceace695df9.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 913.609899] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4d032692-7609-44c1-bd8d-cc40828a4ad2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.633659] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 913.633659] env[68217]: value = "task-2961509" [ 913.633659] env[68217]: _type = "Task" [ 913.633659] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.645449] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961509, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.751955] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "refresh_cache-35817c87-0c55-49bd-917a-59bd39de663c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.752137] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "refresh_cache-35817c87-0c55-49bd-917a-59bd39de663c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 913.752299] env[68217]: DEBUG nova.network.neutron [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 913.813020] env[68217]: DEBUG nova.compute.utils [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 913.815071] env[68217]: DEBUG nova.compute.manager [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Not allocating networking since 'none' was specified. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 913.840314] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74741d00-9185-4673-9305-f7d37ed8a419 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "2612f6fc-a43f-4011-8a09-51088a49371a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.200s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 913.855613] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961508, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45912} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.858761] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1/7371d4d3-e255-4a1f-8d5f-2ee1297e89d1.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 913.859017] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 913.859517] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-fd25c4cb-2091-4068-80c3-109b1a3e1ac2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.866875] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 913.866875] env[68217]: value = "task-2961510" [ 913.866875] env[68217]: _type = "Task" [ 913.866875] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.878828] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961510, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.147302] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961509, 'name': ReconfigVM_Task, 'duration_secs': 0.434429} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.147583] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Reconfigured VM instance instance-0000003f to attach disk [datastore1] aa4b9cc8-d0dc-4a0b-9eec-dceace695df9/aa4b9cc8-d0dc-4a0b-9eec-dceace695df9.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 914.148227] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1eaeb7d4-d8fc-40f5-a5c0-4cca948eb251 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.151049] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c42f25-c852-4742-b053-c4d1ef933082 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.158435] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c750f9f4-4944-470d-ad35-99d14a486ee6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.162762] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 914.162762] env[68217]: value = "task-2961511" [ 914.162762] env[68217]: _type = "Task" [ 914.162762] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.195843] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad47d94d-2079-4927-be55-160cdc579228 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.198671] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961511, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.204260] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7969f5ea-6860-4485-a193-d63ce56f81e8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.218715] env[68217]: DEBUG nova.compute.provider_tree [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.301330] env[68217]: DEBUG nova.network.neutron [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 914.316178] env[68217]: DEBUG nova.compute.manager [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 914.376835] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961510, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069705} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.377122] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 914.377879] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a490dee-52cf-4373-ac34-31bbf5eb0f85 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.403298] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1/7371d4d3-e255-4a1f-8d5f-2ee1297e89d1.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 914.403590] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3992af55-f3a0-42ef-9488-5c84253fc22e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.424078] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 914.424078] env[68217]: value = "task-2961512" [ 914.424078] env[68217]: _type = "Task" [ 914.424078] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.435300] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961512, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.487297] env[68217]: DEBUG nova.network.neutron [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Updating instance_info_cache with network_info: [{"id": "63b92bd9-8f95-481e-9ef4-468ea20dade1", "address": "fa:16:3e:e8:e5:1d", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b92bd9-8f", "ovs_interfaceid": "63b92bd9-8f95-481e-9ef4-468ea20dade1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.673091] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961511, 'name': Rename_Task, 'duration_secs': 0.171474} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.673383] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 914.673630] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fa1a9b3b-3005-4e38-8156-a491140e8508 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.680359] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 914.680359] env[68217]: value = "task-2961513" [ 914.680359] env[68217]: _type = "Task" [ 914.680359] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.688424] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961513, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.723597] env[68217]: DEBUG nova.scheduler.client.report [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 914.786446] env[68217]: DEBUG nova.compute.manager [req-855ac0b4-539e-4d6b-856a-87037b2eae8f req-3bb6fc65-98b1-471a-bc53-6a2fd1f8d6f8 service nova] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Received event network-vif-plugged-63b92bd9-8f95-481e-9ef4-468ea20dade1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 914.786653] env[68217]: DEBUG oslo_concurrency.lockutils [req-855ac0b4-539e-4d6b-856a-87037b2eae8f req-3bb6fc65-98b1-471a-bc53-6a2fd1f8d6f8 service nova] Acquiring lock "35817c87-0c55-49bd-917a-59bd39de663c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 914.786856] env[68217]: DEBUG oslo_concurrency.lockutils [req-855ac0b4-539e-4d6b-856a-87037b2eae8f req-3bb6fc65-98b1-471a-bc53-6a2fd1f8d6f8 service nova] Lock "35817c87-0c55-49bd-917a-59bd39de663c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 914.787030] env[68217]: DEBUG oslo_concurrency.lockutils [req-855ac0b4-539e-4d6b-856a-87037b2eae8f req-3bb6fc65-98b1-471a-bc53-6a2fd1f8d6f8 service nova] Lock "35817c87-0c55-49bd-917a-59bd39de663c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 914.787213] env[68217]: DEBUG nova.compute.manager [req-855ac0b4-539e-4d6b-856a-87037b2eae8f req-3bb6fc65-98b1-471a-bc53-6a2fd1f8d6f8 service nova] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] No waiting events found dispatching network-vif-plugged-63b92bd9-8f95-481e-9ef4-468ea20dade1 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 914.787430] env[68217]: WARNING nova.compute.manager [req-855ac0b4-539e-4d6b-856a-87037b2eae8f req-3bb6fc65-98b1-471a-bc53-6a2fd1f8d6f8 service nova] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Received unexpected event network-vif-plugged-63b92bd9-8f95-481e-9ef4-468ea20dade1 for instance with vm_state building and task_state spawning. 
[ 914.787650] env[68217]: DEBUG nova.compute.manager [req-855ac0b4-539e-4d6b-856a-87037b2eae8f req-3bb6fc65-98b1-471a-bc53-6a2fd1f8d6f8 service nova] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Received event network-changed-63b92bd9-8f95-481e-9ef4-468ea20dade1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 914.787811] env[68217]: DEBUG nova.compute.manager [req-855ac0b4-539e-4d6b-856a-87037b2eae8f req-3bb6fc65-98b1-471a-bc53-6a2fd1f8d6f8 service nova] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Refreshing instance network info cache due to event network-changed-63b92bd9-8f95-481e-9ef4-468ea20dade1. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 914.787974] env[68217]: DEBUG oslo_concurrency.lockutils [req-855ac0b4-539e-4d6b-856a-87037b2eae8f req-3bb6fc65-98b1-471a-bc53-6a2fd1f8d6f8 service nova] Acquiring lock "refresh_cache-35817c87-0c55-49bd-917a-59bd39de663c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 914.935571] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961512, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.988999] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "refresh_cache-35817c87-0c55-49bd-917a-59bd39de663c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 914.989460] env[68217]: DEBUG nova.compute.manager [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Instance network_info: |[{"id": "63b92bd9-8f95-481e-9ef4-468ea20dade1", "address": "fa:16:3e:e8:e5:1d", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b92bd9-8f", "ovs_interfaceid": "63b92bd9-8f95-481e-9ef4-468ea20dade1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 914.989893] env[68217]: DEBUG oslo_concurrency.lockutils [req-855ac0b4-539e-4d6b-856a-87037b2eae8f req-3bb6fc65-98b1-471a-bc53-6a2fd1f8d6f8 service nova] Acquired lock 
"refresh_cache-35817c87-0c55-49bd-917a-59bd39de663c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 914.990198] env[68217]: DEBUG nova.network.neutron [req-855ac0b4-539e-4d6b-856a-87037b2eae8f req-3bb6fc65-98b1-471a-bc53-6a2fd1f8d6f8 service nova] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Refreshing network info cache for port 63b92bd9-8f95-481e-9ef4-468ea20dade1 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 914.991921] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e8:e5:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b1f3e6c3-5584-4852-9017-476ab8ac4946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '63b92bd9-8f95-481e-9ef4-468ea20dade1', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 915.002080] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 915.005023] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 915.005560] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a7e1fe68-ef7e-41c9-8bd8-49be45b61616 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.026475] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 915.026475] env[68217]: value = "task-2961514" [ 915.026475] env[68217]: _type = "Task" [ 915.026475] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.035341] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961514, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.191262] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961513, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.227401] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.920s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.230680] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.771s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 915.232579] env[68217]: INFO nova.compute.claims [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 915.281868] env[68217]: DEBUG nova.network.neutron [req-855ac0b4-539e-4d6b-856a-87037b2eae8f req-3bb6fc65-98b1-471a-bc53-6a2fd1f8d6f8 service nova] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Updated VIF entry in instance network info cache for port 63b92bd9-8f95-481e-9ef4-468ea20dade1. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 915.282239] env[68217]: DEBUG nova.network.neutron [req-855ac0b4-539e-4d6b-856a-87037b2eae8f req-3bb6fc65-98b1-471a-bc53-6a2fd1f8d6f8 service nova] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Updating instance_info_cache with network_info: [{"id": "63b92bd9-8f95-481e-9ef4-468ea20dade1", "address": "fa:16:3e:e8:e5:1d", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b92bd9-8f", "ovs_interfaceid": "63b92bd9-8f95-481e-9ef4-468ea20dade1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 915.329293] env[68217]: DEBUG nova.compute.manager [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 915.353313] env[68217]: DEBUG nova.virt.hardware [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 915.353553] env[68217]: DEBUG nova.virt.hardware [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 915.353709] env[68217]: DEBUG nova.virt.hardware [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 915.353883] env[68217]: DEBUG nova.virt.hardware [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 915.354034] env[68217]: DEBUG nova.virt.hardware [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 915.354193] env[68217]: DEBUG nova.virt.hardware [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 915.354485] env[68217]: DEBUG nova.virt.hardware [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 915.354669] env[68217]: DEBUG nova.virt.hardware [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 915.354838] env[68217]: DEBUG nova.virt.hardware [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 
tempest-ServerShowV254Test-596986300-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 915.354997] env[68217]: DEBUG nova.virt.hardware [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 915.355470] env[68217]: DEBUG nova.virt.hardware [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 915.356119] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-583f3445-7d03-4006-b7a0-2d9212a17392 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.364849] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f184cc3b-01ec-43c2-ba71-85c61a6a90c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.378608] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Instance VIF info [] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 915.384228] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Creating folder: Project (5ffd141b14024461bb89dfc09629e239). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 915.384543] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c17b1bd0-4c00-434d-bd48-a33caaefdd28 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.393079] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Created folder: Project (5ffd141b14024461bb89dfc09629e239) in parent group-v594094. [ 915.393263] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Creating folder: Instances. Parent ref: group-v594319. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 915.393491] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f7f58686-35f0-4f05-9f4b-ee91a3c46768 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.403411] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Created folder: Instances in parent group-v594319. 
[ 915.403655] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 915.403852] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 915.404077] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2161feb2-9d5c-47f9-95a4-4ce45267ce22 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.420132] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 915.420132] env[68217]: value = "task-2961517" [ 915.420132] env[68217]: _type = "Task" [ 915.420132] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.427598] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961517, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.434880] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961512, 'name': ReconfigVM_Task, 'duration_secs': 0.665605} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.435138] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1/7371d4d3-e255-4a1f-8d5f-2ee1297e89d1.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 915.435717] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2de5d689-73cf-4dfe-ab73-c652cc561b02 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.442274] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 915.442274] env[68217]: value = "task-2961518" [ 915.442274] env[68217]: _type = "Task" [ 915.442274] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.452570] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961518, 'name': Rename_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.536406] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961514, 'name': CreateVM_Task, 'duration_secs': 0.349738} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.536599] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 915.537364] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.537532] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.537852] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 915.538130] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e28ab0b4-01ae-4a88-95c2-c375fa0876f3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.542612] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 915.542612] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5279e18f-5d87-38fb-aa56-5b1191954686" [ 915.542612] env[68217]: _type = "Task" [ 915.542612] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.552184] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5279e18f-5d87-38fb-aa56-5b1191954686, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.691089] env[68217]: DEBUG oslo_vmware.api [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961513, 'name': PowerOnVM_Task, 'duration_secs': 0.585902} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.691340] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 915.691545] env[68217]: DEBUG nova.compute.manager [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 915.692344] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e9d3e30-d132-4240-b60b-2b7cfcab0c9f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.747198] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1f13762-6301-4778-a9e6-1d4c496e687d tempest-ServerActionsV293TestJSON-481733303 tempest-ServerActionsV293TestJSON-481733303-project-member] Lock "95e625e9-a726-4c3c-be66-7b8ce93b5f8a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.562s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 915.784937] env[68217]: DEBUG oslo_concurrency.lockutils [req-855ac0b4-539e-4d6b-856a-87037b2eae8f req-3bb6fc65-98b1-471a-bc53-6a2fd1f8d6f8 service nova] Releasing lock "refresh_cache-35817c87-0c55-49bd-917a-59bd39de663c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 915.930876] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961517, 'name': CreateVM_Task, 'duration_secs': 0.306779} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.930876] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 915.931922] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.931922] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 915.932360] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 915.932593] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94eacee8-7bc9-419c-9016-02c99d90ec46 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.937308] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 915.937308] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524effb9-b278-cca3-7830-6d85a29a244a" [ 915.937308] env[68217]: _type = "Task" [ 915.937308] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.944915] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524effb9-b278-cca3-7830-6d85a29a244a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.951895] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961518, 'name': Rename_Task, 'duration_secs': 0.179134} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.952205] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 915.952409] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ab0ae96e-d0dc-4431-bc65-0453e7604ae8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.958590] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 915.958590] env[68217]: value = "task-2961519" [ 915.958590] env[68217]: _type = "Task" [ 915.958590] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.973339] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961519, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.052028] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5279e18f-5d87-38fb-aa56-5b1191954686, 'name': SearchDatastore_Task, 'duration_secs': 0.010084} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.052355] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.052612] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 916.052840] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.052984] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.053174] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 916.053429] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d524b4a4-c6bf-4347-8bd9-63bb348bc19e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.066765] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 916.066944] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 916.067667] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfc0ca65-a366-4ca7-a3f3-46f5c1f93768 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.072653] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 916.072653] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5297e275-ebf0-51b5-978d-160016d5a076" [ 916.072653] env[68217]: _type = "Task" [ 916.072653] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.079960] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5297e275-ebf0-51b5-978d-160016d5a076, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.208866] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.449132] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524effb9-b278-cca3-7830-6d85a29a244a, 'name': SearchDatastore_Task, 'duration_secs': 0.019106} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.451699] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.451939] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 916.452184] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 916.452373] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 916.452502] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 916.453241] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2db18d2a-16f8-4074-9222-f347475bde73 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.464443] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 916.464443] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 916.465023] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84e29235-f2df-4d4c-9183-79247f992f17 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.472878] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961519, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.476222] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 916.476222] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52484ac6-fd00-6df0-a528-1e6d0002b17b" [ 916.476222] env[68217]: _type = "Task" [ 916.476222] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.485784] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52484ac6-fd00-6df0-a528-1e6d0002b17b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.584057] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5297e275-ebf0-51b5-978d-160016d5a076, 'name': SearchDatastore_Task, 'duration_secs': 0.031595} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.585045] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f139382a-b021-487d-983e-394b1a9a530d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.589951] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8ac0e6-d340-487c-a0c0-dd26686499bb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.593745] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 916.593745] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b78153-673a-b0b7-cfe2-14202df91e83" [ 916.593745] env[68217]: _type = "Task" [ 916.593745] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.599680] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27df43ba-6d3c-42e0-be0e-c01ac7b11dfc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.605818] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b78153-673a-b0b7-cfe2-14202df91e83, 'name': SearchDatastore_Task, 'duration_secs': 0.011602} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.606350] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 916.606619] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 35817c87-0c55-49bd-917a-59bd39de663c/35817c87-0c55-49bd-917a-59bd39de663c.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 916.606885] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d80fa743-effd-4911-b7ae-77f828c61f13 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.633582] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e92066-e4f8-449a-8139-fc5d635c6727 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.638121] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 916.638121] env[68217]: value = "task-2961520" [ 916.638121] env[68217]: _type = "Task" [ 916.638121] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.648084] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1de2d3b-7447-4bb0-8070-f04fe3923788 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.655680] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961520, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.665969] env[68217]: DEBUG nova.compute.provider_tree [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 916.670999] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 916.672022] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 916.695339] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "ca9ef7ff-b942-4363-a4f8-9163791ec162" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.695638] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "ca9ef7ff-b942-4363-a4f8-9163791ec162" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.695863] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "ca9ef7ff-b942-4363-a4f8-9163791ec162-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 916.696088] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "ca9ef7ff-b942-4363-a4f8-9163791ec162-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 916.696268] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "ca9ef7ff-b942-4363-a4f8-9163791ec162-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 916.698341] env[68217]: INFO nova.compute.manager [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 
tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Terminating instance [ 916.970876] env[68217]: DEBUG oslo_vmware.api [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961519, 'name': PowerOnVM_Task, 'duration_secs': 0.83951} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.970876] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 916.971242] env[68217]: INFO nova.compute.manager [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Took 8.30 seconds to spawn the instance on the hypervisor. [ 916.971489] env[68217]: DEBUG nova.compute.manager [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 916.972117] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d0300e8-594b-41d3-9f47-a711b610d679 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.990997] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52484ac6-fd00-6df0-a528-1e6d0002b17b, 'name': SearchDatastore_Task, 'duration_secs': 0.02093} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.992242] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21aaa6ad-d700-4b03-ad27-e5e97addfcd3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.998579] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 916.998579] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f57a06-f93b-9851-9a9f-ba5e9fe1e804" [ 916.998579] env[68217]: _type = "Task" [ 916.998579] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.007102] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f57a06-f93b-9851-9a9f-ba5e9fe1e804, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.150711] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961520, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.170094] env[68217]: DEBUG nova.scheduler.client.report [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 917.181200] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 917.181200] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 917.181200] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 917.181200] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 917.181200] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 917.181200] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 917.181458] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68217) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 917.181458] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 917.202906] env[68217]: DEBUG nova.compute.manager [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 917.203142] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 917.204158] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415b13ed-e0bb-4847-af43-063027d823b3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.212178] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 917.212433] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-36a6f504-90b3-4968-85cf-e52665bac476 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.218651] env[68217]: DEBUG oslo_vmware.api [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 917.218651] env[68217]: value = "task-2961521" [ 917.218651] env[68217]: _type = "Task" [ 917.218651] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.226739] env[68217]: DEBUG oslo_vmware.api [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961521, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.497891] env[68217]: INFO nova.compute.manager [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Took 21.08 seconds to build instance. [ 917.509188] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f57a06-f93b-9851-9a9f-ba5e9fe1e804, 'name': SearchDatastore_Task, 'duration_secs': 0.021074} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.509188] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 917.509188] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 4366c94c-164d-4cb9-8f04-7f26db4c0d3c/4366c94c-164d-4cb9-8f04-7f26db4c0d3c.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 917.509188] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1ce30422-22b9-428d-800c-2e169f1abe0d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.516078] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 917.516078] env[68217]: value = "task-2961522" [ 917.516078] env[68217]: _type = "Task" [ 917.516078] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.524517] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961522, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.650502] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961520, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.59139} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.650502] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 35817c87-0c55-49bd-917a-59bd39de663c/35817c87-0c55-49bd-917a-59bd39de663c.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 917.650674] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 917.650936] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ab0fd6a-3546-4ca8-ad82-51b2c5949135 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.660968] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 917.660968] env[68217]: value = "task-2961523" [ 917.660968] env[68217]: _type = "Task" [ 917.660968] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.670137] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961523, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.681497] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 917.682665] env[68217]: DEBUG nova.compute.manager [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 917.685769] env[68217]: DEBUG oslo_concurrency.lockutils [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.518s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 917.686058] env[68217]: DEBUG nova.objects.instance [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lazy-loading 'resources' on Instance uuid 23366029-e754-49dc-ba56-7a0d92232d81 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 917.688181] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 917.733079] env[68217]: DEBUG oslo_vmware.api [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961521, 'name': PowerOffVM_Task, 'duration_secs': 0.203621} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.733079] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 917.733079] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 917.733374] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f3ab9d1-7f33-461f-b436-d3a0a4d1e69d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.807140] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 917.807409] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 917.807603] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Deleting the datastore file [datastore1] 
ca9ef7ff-b942-4363-a4f8-9163791ec162 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 917.807888] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b20d2ff9-3563-4af6-ad8d-00c590d0599c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.816221] env[68217]: DEBUG oslo_vmware.api [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 917.816221] env[68217]: value = "task-2961525" [ 917.816221] env[68217]: _type = "Task" [ 917.816221] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.826455] env[68217]: DEBUG oslo_vmware.api [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961525, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.999611] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66b138d0-daad-44c3-80b2-f3181a9d8667 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "7371d4d3-e255-4a1f-8d5f-2ee1297e89d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.608s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 918.026341] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961522, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.501383} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.027209] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 4366c94c-164d-4cb9-8f04-7f26db4c0d3c/4366c94c-164d-4cb9-8f04-7f26db4c0d3c.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 918.027427] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 918.027676] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a36a5961-9627-4895-8606-5082eadc926b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.034261] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 918.034261] env[68217]: value = "task-2961526" [ 918.034261] env[68217]: _type = "Task" [ 918.034261] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.042040] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961526, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.172721] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961523, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066209} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.173394] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 918.174262] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91263679-f266-428e-bfc1-1c1d450687f9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.197773] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 35817c87-0c55-49bd-917a-59bd39de663c/35817c87-0c55-49bd-917a-59bd39de663c.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 918.199160] env[68217]: DEBUG nova.compute.utils [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 918.203827] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36b8542e-1157-4376-b4b2-774b5500b703 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.221024] env[68217]: DEBUG nova.compute.manager [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 918.221024] env[68217]: DEBUG nova.network.neutron [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 918.229417] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 918.229417] env[68217]: value = "task-2961527" [ 918.229417] env[68217]: _type = "Task" [ 918.229417] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.238130] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961527, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.271435] env[68217]: DEBUG nova.policy [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '554b6b3d22404c0ba52c739b3c7b98a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8fb62d18446841a3b2a6ac25ab5dc869', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 918.326261] env[68217]: DEBUG oslo_vmware.api [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961525, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.290048} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.328746] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 918.328938] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 918.329179] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 918.329362] env[68217]: INFO nova.compute.manager [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Took 1.13 seconds to destroy the instance on the hypervisor. [ 918.329602] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 918.329981] env[68217]: DEBUG nova.compute.manager [-] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 918.330095] env[68217]: DEBUG nova.network.neutron [-] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 918.514339] env[68217]: DEBUG oslo_concurrency.lockutils [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "interface-580e6909-7d05-447a-a378-f0b8b71f059a-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 918.514674] env[68217]: DEBUG oslo_concurrency.lockutils [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-580e6909-7d05-447a-a378-f0b8b71f059a-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 918.515131] env[68217]: DEBUG nova.objects.instance [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lazy-loading 'flavor' on Instance uuid 580e6909-7d05-447a-a378-f0b8b71f059a {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 918.546688] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961526, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.168559} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.546986] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 918.547820] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06e40520-e3dc-4a46-9c0c-a8d1ebd5f266 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.572135] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 4366c94c-164d-4cb9-8f04-7f26db4c0d3c/4366c94c-164d-4cb9-8f04-7f26db4c0d3c.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 918.575077] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-92874a6c-954f-404b-a284-917e0ba30437 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.596647] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 918.596647] env[68217]: value = "task-2961528" [ 918.596647] env[68217]: _type = "Task" [ 918.596647] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.601909] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9edd66d5-7202-4d96-9d17-fd476783d4fd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.609146] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961528, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.611819] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483b1797-643d-46e8-8d7e-1ddc98cf5531 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.643226] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2fb220-f123-4650-a16a-033614b6eef9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.651221] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a3be0f-de1a-45b1-b726-7cce0dcb8858 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.668764] env[68217]: DEBUG nova.compute.provider_tree [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 918.721293] env[68217]: DEBUG nova.compute.manager [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 918.738787] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961527, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.820416] env[68217]: DEBUG nova.network.neutron [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Successfully created port: 41cb41cd-7c04-4409-948d-b45a5441a4f4 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 919.075384] env[68217]: DEBUG nova.compute.manager [req-e8e6d54a-2b3b-49af-8477-01610b087bc9 req-4bf4761a-03ef-4ec4-bac9-79ecc9bf29d8 service nova] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Received event network-vif-deleted-5002fb09-ddc6-4497-a55f-8cfe415c4d70 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 919.076126] env[68217]: INFO nova.compute.manager [req-e8e6d54a-2b3b-49af-8477-01610b087bc9 req-4bf4761a-03ef-4ec4-bac9-79ecc9bf29d8 service nova] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Neutron deleted interface 5002fb09-ddc6-4497-a55f-8cfe415c4d70; detaching it from the instance and deleting it from the info cache [ 919.077594] env[68217]: DEBUG nova.network.neutron [req-e8e6d54a-2b3b-49af-8477-01610b087bc9 req-4bf4761a-03ef-4ec4-bac9-79ecc9bf29d8 service nova] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.107968] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961528, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.135555] env[68217]: DEBUG nova.objects.instance [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lazy-loading 'pci_requests' on Instance uuid 580e6909-7d05-447a-a378-f0b8b71f059a {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 919.173015] env[68217]: DEBUG nova.scheduler.client.report [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 919.239960] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961527, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.300779] env[68217]: DEBUG nova.network.neutron [-] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 919.580113] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-550cd504-315b-4457-833c-750cf8c214e7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.589612] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-406d014c-a77c-4efc-8e80-09818afb455d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.609072] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961528, 'name': ReconfigVM_Task, 'duration_secs': 0.819075} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.609348] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 4366c94c-164d-4cb9-8f04-7f26db4c0d3c/4366c94c-164d-4cb9-8f04-7f26db4c0d3c.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 919.609958] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ee485a28-4d73-41c6-aa52-4c38bed669bf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.616852] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 919.616852] env[68217]: value = "task-2961529" [ 919.616852] env[68217]: _type = "Task" [ 919.616852] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.634246] env[68217]: DEBUG nova.compute.manager [req-e8e6d54a-2b3b-49af-8477-01610b087bc9 req-4bf4761a-03ef-4ec4-bac9-79ecc9bf29d8 service nova] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Detach interface failed, port_id=5002fb09-ddc6-4497-a55f-8cfe415c4d70, reason: Instance ca9ef7ff-b942-4363-a4f8-9163791ec162 could not be found. 
{{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 919.640243] env[68217]: DEBUG nova.objects.base [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Object Instance<580e6909-7d05-447a-a378-f0b8b71f059a> lazy-loaded attributes: flavor,pci_requests {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 919.640499] env[68217]: DEBUG nova.network.neutron [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 919.642499] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961529, 'name': Rename_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.679940] env[68217]: DEBUG oslo_concurrency.lockutils [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.994s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 919.683332] env[68217]: DEBUG oslo_concurrency.lockutils [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 13.419s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 919.683332] env[68217]: DEBUG nova.objects.instance [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68217) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 919.687530] env[68217]: DEBUG nova.policy [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9172578aec2742bb9aafc58752b926c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef7e30ed571740f3b3ea6b24fc9c6e20', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 919.707316] env[68217]: INFO nova.scheduler.client.report [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Deleted allocations for instance 23366029-e754-49dc-ba56-7a0d92232d81 [ 919.734610] env[68217]: DEBUG nova.compute.manager [None 
req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 919.747087] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961527, 'name': ReconfigVM_Task, 'duration_secs': 1.076542} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 919.748619] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 35817c87-0c55-49bd-917a-59bd39de663c/35817c87-0c55-49bd-917a-59bd39de663c.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 919.748619] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-edfb5bdf-7522-4b57-b878-e02e6a2cb6bf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.756353] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 919.756353] env[68217]: value = "task-2961530" [ 919.756353] env[68217]: _type = "Task" [ 919.756353] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.767727] env[68217]: DEBUG nova.virt.hardware [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 919.767886] env[68217]: DEBUG nova.virt.hardware [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 919.768112] env[68217]: DEBUG nova.virt.hardware [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 919.768375] env[68217]: DEBUG nova.virt.hardware [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 919.768670] env[68217]: DEBUG nova.virt.hardware [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 919.768963] env[68217]: DEBUG nova.virt.hardware [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 919.769556] env[68217]: DEBUG nova.virt.hardware [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 919.770064] env[68217]: DEBUG nova.virt.hardware [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 919.770064] env[68217]: DEBUG nova.virt.hardware 
[None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 919.770279] env[68217]: DEBUG nova.virt.hardware [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 919.770522] env[68217]: DEBUG nova.virt.hardware [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 919.773146] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b60240-b1ea-48eb-bdd4-2067b5974332 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.784121] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961530, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.790659] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f52767ee-38b0-4673-999c-d41c44c3dcb5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.808207] env[68217]: INFO nova.compute.manager [-] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Took 1.48 seconds to deallocate network for instance. [ 920.019522] env[68217]: DEBUG nova.network.neutron [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Successfully created port: efb63bec-2d1f-41ad-b7bc-f9dc46cdd111 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 920.131458] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961529, 'name': Rename_Task, 'duration_secs': 0.163569} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.131716] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 920.131967] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b2670f25-c4dc-4a7e-99bb-ea6c7b64b032 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.137836] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 920.137836] env[68217]: value = "task-2961531" [ 920.137836] env[68217]: _type = "Task" [ 920.137836] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.155901] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961531, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.215201] env[68217]: DEBUG oslo_concurrency.lockutils [None req-54fe4f1a-e312-4bb5-ae33-ad487f74d9a2 tempest-ListImageFiltersTestJSON-2008574723 tempest-ListImageFiltersTestJSON-2008574723-project-member] Lock "23366029-e754-49dc-ba56-7a0d92232d81" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.584s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.268702] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961530, 'name': Rename_Task, 'duration_secs': 0.445415} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.268990] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 920.269254] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a37b0b6e-b706-45f1-8a0d-539e694d0006 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.275274] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 920.275274] env[68217]: value = "task-2961532" [ 920.275274] env[68217]: _type = "Task" [ 920.275274] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 920.283525] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961532, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.314335] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.649322] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961531, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.697062] env[68217]: DEBUG oslo_concurrency.lockutils [None req-787a9d98-f2c7-4ce9-944c-0a335f940fe2 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.697825] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.914s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.702037] env[68217]: INFO nova.compute.claims [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 920.786980] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961532, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.826770] env[68217]: DEBUG nova.network.neutron [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Successfully updated port: 41cb41cd-7c04-4409-948d-b45a5441a4f4 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 920.904625] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "7371d4d3-e255-4a1f-8d5f-2ee1297e89d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.904849] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "7371d4d3-e255-4a1f-8d5f-2ee1297e89d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.905072] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "7371d4d3-e255-4a1f-8d5f-2ee1297e89d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 920.905263] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "7371d4d3-e255-4a1f-8d5f-2ee1297e89d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 920.905455] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "7371d4d3-e255-4a1f-8d5f-2ee1297e89d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 920.907591] env[68217]: INFO nova.compute.manager [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Terminating instance [ 921.148668] env[68217]: DEBUG oslo_vmware.api [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961531, 'name': PowerOnVM_Task, 'duration_secs': 0.595501} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.149063] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 921.149355] env[68217]: INFO nova.compute.manager [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Took 5.82 seconds to spawn the instance on the hypervisor. [ 921.149542] env[68217]: DEBUG nova.compute.manager [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 921.150327] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3159ddc6-ed8b-4495-8484-72d3b32b6bef {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.286729] env[68217]: DEBUG oslo_vmware.api [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961532, 'name': PowerOnVM_Task, 'duration_secs': 0.517765} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.288028] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 921.288028] env[68217]: INFO nova.compute.manager [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Took 8.47 seconds to spawn the instance on the hypervisor. 
[ 921.288028] env[68217]: DEBUG nova.compute.manager [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 921.288595] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ad940c-f603-4421-84ef-7b29ed53547d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.313518] env[68217]: DEBUG nova.compute.manager [req-9acfe520-ebcc-45fa-84ce-bb1e02fe6b1c req-0a236f75-6cc2-44b4-a8a9-fd9d6b91fd8a service nova] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Received event network-vif-plugged-41cb41cd-7c04-4409-948d-b45a5441a4f4 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 921.313518] env[68217]: DEBUG oslo_concurrency.lockutils [req-9acfe520-ebcc-45fa-84ce-bb1e02fe6b1c req-0a236f75-6cc2-44b4-a8a9-fd9d6b91fd8a service nova] Acquiring lock "213b720b-b782-41c4-b60d-ef0af4b62932-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 921.313518] env[68217]: DEBUG oslo_concurrency.lockutils [req-9acfe520-ebcc-45fa-84ce-bb1e02fe6b1c req-0a236f75-6cc2-44b4-a8a9-fd9d6b91fd8a service nova] Lock "213b720b-b782-41c4-b60d-ef0af4b62932-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 921.313518] env[68217]: DEBUG oslo_concurrency.lockutils [req-9acfe520-ebcc-45fa-84ce-bb1e02fe6b1c req-0a236f75-6cc2-44b4-a8a9-fd9d6b91fd8a service nova] Lock "213b720b-b782-41c4-b60d-ef0af4b62932-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 921.313518] env[68217]: DEBUG nova.compute.manager [req-9acfe520-ebcc-45fa-84ce-bb1e02fe6b1c req-0a236f75-6cc2-44b4-a8a9-fd9d6b91fd8a service nova] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] No waiting events found dispatching network-vif-plugged-41cb41cd-7c04-4409-948d-b45a5441a4f4 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 921.313914] env[68217]: WARNING nova.compute.manager [req-9acfe520-ebcc-45fa-84ce-bb1e02fe6b1c req-0a236f75-6cc2-44b4-a8a9-fd9d6b91fd8a service nova] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Received unexpected event network-vif-plugged-41cb41cd-7c04-4409-948d-b45a5441a4f4 for instance with vm_state building and task_state spawning. 
[ 921.313914] env[68217]: DEBUG nova.compute.manager [req-9acfe520-ebcc-45fa-84ce-bb1e02fe6b1c req-0a236f75-6cc2-44b4-a8a9-fd9d6b91fd8a service nova] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Received event network-changed-41cb41cd-7c04-4409-948d-b45a5441a4f4 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 921.314047] env[68217]: DEBUG nova.compute.manager [req-9acfe520-ebcc-45fa-84ce-bb1e02fe6b1c req-0a236f75-6cc2-44b4-a8a9-fd9d6b91fd8a service nova] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Refreshing instance network info cache due to event network-changed-41cb41cd-7c04-4409-948d-b45a5441a4f4. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 921.314171] env[68217]: DEBUG oslo_concurrency.lockutils [req-9acfe520-ebcc-45fa-84ce-bb1e02fe6b1c req-0a236f75-6cc2-44b4-a8a9-fd9d6b91fd8a service nova] Acquiring lock "refresh_cache-213b720b-b782-41c4-b60d-ef0af4b62932" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.314304] env[68217]: DEBUG oslo_concurrency.lockutils [req-9acfe520-ebcc-45fa-84ce-bb1e02fe6b1c req-0a236f75-6cc2-44b4-a8a9-fd9d6b91fd8a service nova] Acquired lock "refresh_cache-213b720b-b782-41c4-b60d-ef0af4b62932" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 921.314494] env[68217]: DEBUG nova.network.neutron [req-9acfe520-ebcc-45fa-84ce-bb1e02fe6b1c req-0a236f75-6cc2-44b4-a8a9-fd9d6b91fd8a service nova] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Refreshing network info cache for port 41cb41cd-7c04-4409-948d-b45a5441a4f4 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 921.329559] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "refresh_cache-213b720b-b782-41c4-b60d-ef0af4b62932" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 921.413757] env[68217]: DEBUG nova.compute.manager [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 921.413895] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 921.414807] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24d00d0f-2dfb-408b-bd68-7eed45129252 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.423375] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 921.423651] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-faf26e4b-3aec-476a-bf82-8189df366dd9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.430024] env[68217]: DEBUG oslo_vmware.api [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 921.430024] env[68217]: value = "task-2961533" [ 921.430024] env[68217]: _type = "Task" [ 921.430024] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.438831] env[68217]: DEBUG oslo_vmware.api [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961533, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.667900] env[68217]: INFO nova.compute.manager [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Took 21.75 seconds to build instance. [ 921.710035] env[68217]: DEBUG nova.network.neutron [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Successfully updated port: efb63bec-2d1f-41ad-b7bc-f9dc46cdd111 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 921.809854] env[68217]: INFO nova.compute.manager [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Took 23.53 seconds to build instance. [ 921.852513] env[68217]: DEBUG nova.network.neutron [req-9acfe520-ebcc-45fa-84ce-bb1e02fe6b1c req-0a236f75-6cc2-44b4-a8a9-fd9d6b91fd8a service nova] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 921.936996] env[68217]: DEBUG nova.network.neutron [req-9acfe520-ebcc-45fa-84ce-bb1e02fe6b1c req-0a236f75-6cc2-44b4-a8a9-fd9d6b91fd8a service nova] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.945135] env[68217]: DEBUG oslo_vmware.api [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961533, 'name': PowerOffVM_Task, 'duration_secs': 0.362123} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.946886] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 921.947075] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 921.947692] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5070d37e-fa0b-45fa-93a2-a6dc4aa4bc3f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.015557] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 922.015709] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 922.015886] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Deleting the datastore file [datastore2] 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 922.016267] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3033828-fc21-40d3-9bde-8267aa85ca57 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.034595] env[68217]: DEBUG oslo_vmware.api [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 922.034595] env[68217]: value = "task-2961535" [ 922.034595] env[68217]: _type = "Task" [ 
922.034595] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 922.047999] env[68217]: DEBUG oslo_vmware.api [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961535, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.149010] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9880f1a1-84ed-42ff-a0f1-30f51ee81f6a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.160019] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52fd58e1-b172-4c17-9def-026a00d091f9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.193213] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c8199c6-559f-460e-8010-5dcb1e58e0e7 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Lock "4366c94c-164d-4cb9-8f04-7f26db4c0d3c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.284s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.193213] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44939689-1160-4e17-9c37-6fe92ed35f32 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.200955] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ed353b6-1ceb-4330-b5f4-573fd66dfefe {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 922.215547] env[68217]: DEBUG oslo_concurrency.lockutils [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "refresh_cache-580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 922.216091] env[68217]: DEBUG oslo_concurrency.lockutils [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "refresh_cache-580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.216297] env[68217]: DEBUG nova.network.neutron [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 922.217782] env[68217]: DEBUG nova.compute.provider_tree [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 922.314658] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8a4a98ac-8b8d-41c7-80ec-4a329bd39583 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "35817c87-0c55-49bd-917a-59bd39de663c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 25.040s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 922.440150] env[68217]: DEBUG oslo_concurrency.lockutils [req-9acfe520-ebcc-45fa-84ce-bb1e02fe6b1c req-0a236f75-6cc2-44b4-a8a9-fd9d6b91fd8a service nova] Releasing lock "refresh_cache-213b720b-b782-41c4-b60d-ef0af4b62932" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 922.440546] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "refresh_cache-213b720b-b782-41c4-b60d-ef0af4b62932" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 922.440708] env[68217]: DEBUG nova.network.neutron [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 922.546210] env[68217]: DEBUG oslo_vmware.api [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961535, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196081} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 922.546210] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 922.546210] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 922.546210] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 922.546368] env[68217]: INFO nova.compute.manager [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 922.546610] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 922.546800] env[68217]: DEBUG nova.compute.manager [-] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 922.546891] env[68217]: DEBUG nova.network.neutron [-] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 922.722404] env[68217]: DEBUG nova.scheduler.client.report [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 922.811094] env[68217]: WARNING nova.network.neutron [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] 246af4c9-69b4-4542-84b9-2afe67cf297a already exists in list: networks containing: ['246af4c9-69b4-4542-84b9-2afe67cf297a']. ignoring it [ 922.975089] env[68217]: DEBUG nova.network.neutron [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 923.137810] env[68217]: DEBUG nova.compute.manager [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Stashing vm_state: active {{(pid=68217) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 923.142859] env[68217]: DEBUG nova.network.neutron [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Updating instance_info_cache with network_info: [{"id": "41cb41cd-7c04-4409-948d-b45a5441a4f4", "address": "fa:16:3e:e7:0e:98", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41cb41cd-7c", "ovs_interfaceid": "41cb41cd-7c04-4409-948d-b45a5441a4f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.229906] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.532s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.230457] env[68217]: DEBUG nova.compute.manager [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 923.237025] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.371s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.237025] env[68217]: DEBUG nova.objects.instance [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lazy-loading 'resources' on Instance uuid 650ebd16-da81-475e-a82a-7fa5fb2880bc {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 923.444230] env[68217]: DEBUG nova.network.neutron [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Updating instance_info_cache with network_info: [{"id": "9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2", "address": "fa:16:3e:cd:e0:cd", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e834f9f-3d", "ovs_interfaceid": "9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "efb63bec-2d1f-41ad-b7bc-f9dc46cdd111", "address": "fa:16:3e:ab:4a:bd", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefb63bec-2d", "ovs_interfaceid": "efb63bec-2d1f-41ad-b7bc-f9dc46cdd111", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.600204] env[68217]: DEBUG nova.network.neutron [-] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 923.649197] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "refresh_cache-213b720b-b782-41c4-b60d-ef0af4b62932" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.649197] env[68217]: DEBUG nova.compute.manager [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Instance network_info: |[{"id": "41cb41cd-7c04-4409-948d-b45a5441a4f4", "address": "fa:16:3e:e7:0e:98", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap41cb41cd-7c", "ovs_interfaceid": "41cb41cd-7c04-4409-948d-b45a5441a4f4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 923.649197] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:0e:98', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '02bbcead-d833-4543-bec6-fb82dfe659ff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '41cb41cd-7c04-4409-948d-b45a5441a4f4', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 923.657058] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 923.658389] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 923.659824] env[68217]: DEBUG nova.compute.manager [req-107fde11-b55f-4c21-82db-f0fa2526c07c req-e06bc671-758d-4fca-bacd-8540769dbb6f service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Received event network-vif-plugged-efb63bec-2d1f-41ad-b7bc-f9dc46cdd111 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 923.661220] env[68217]: DEBUG oslo_concurrency.lockutils [req-107fde11-b55f-4c21-82db-f0fa2526c07c req-e06bc671-758d-4fca-bacd-8540769dbb6f service nova] Acquiring lock "580e6909-7d05-447a-a378-f0b8b71f059a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.661220] env[68217]: DEBUG oslo_concurrency.lockutils [req-107fde11-b55f-4c21-82db-f0fa2526c07c req-e06bc671-758d-4fca-bacd-8540769dbb6f service nova] Lock "580e6909-7d05-447a-a378-f0b8b71f059a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 923.662033] env[68217]: DEBUG oslo_concurrency.lockutils [req-107fde11-b55f-4c21-82db-f0fa2526c07c req-e06bc671-758d-4fca-bacd-8540769dbb6f service nova] Lock "580e6909-7d05-447a-a378-f0b8b71f059a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 923.662033] env[68217]: DEBUG nova.compute.manager [req-107fde11-b55f-4c21-82db-f0fa2526c07c req-e06bc671-758d-4fca-bacd-8540769dbb6f service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] No waiting events found dispatching network-vif-plugged-efb63bec-2d1f-41ad-b7bc-f9dc46cdd111 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 923.662033] env[68217]: WARNING nova.compute.manager [req-107fde11-b55f-4c21-82db-f0fa2526c07c req-e06bc671-758d-4fca-bacd-8540769dbb6f service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Received unexpected event network-vif-plugged-efb63bec-2d1f-41ad-b7bc-f9dc46cdd111 for instance with vm_state active and task_state None. [ 923.662033] env[68217]: DEBUG nova.compute.manager [req-107fde11-b55f-4c21-82db-f0fa2526c07c req-e06bc671-758d-4fca-bacd-8540769dbb6f service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Received event network-changed-efb63bec-2d1f-41ad-b7bc-f9dc46cdd111 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 923.662033] env[68217]: DEBUG nova.compute.manager [req-107fde11-b55f-4c21-82db-f0fa2526c07c req-e06bc671-758d-4fca-bacd-8540769dbb6f service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Refreshing instance network info cache due to event network-changed-efb63bec-2d1f-41ad-b7bc-f9dc46cdd111. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 923.662251] env[68217]: DEBUG oslo_concurrency.lockutils [req-107fde11-b55f-4c21-82db-f0fa2526c07c req-e06bc671-758d-4fca-bacd-8540769dbb6f service nova] Acquiring lock "refresh_cache-580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.663761] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 923.663995] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4ad68c16-cadb-4b03-b3df-45745b3952f2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.687630] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 923.687630] env[68217]: value = "task-2961536" [ 923.687630] env[68217]: _type = "Task" [ 923.687630] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 923.695956] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961536, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.744019] env[68217]: DEBUG nova.compute.utils [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 923.746349] env[68217]: INFO nova.compute.manager [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Rebuilding instance [ 923.748054] env[68217]: DEBUG nova.compute.manager [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 923.748218] env[68217]: DEBUG nova.network.neutron [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 923.817506] env[68217]: DEBUG nova.compute.manager [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 923.818423] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef5deb21-0be1-41b0-ab1f-0ddfa07b3b53 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.839834] env[68217]: DEBUG nova.policy [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fd08981ea724019826d597a1c8b4ecd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d6233e9874c41329f81c990f8bc72b1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 923.947239] env[68217]: DEBUG oslo_concurrency.lockutils [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "refresh_cache-580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 923.947947] env[68217]: DEBUG oslo_concurrency.lockutils [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 923.948113] env[68217]: DEBUG oslo_concurrency.lockutils [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.948380] env[68217]: DEBUG oslo_concurrency.lockutils [req-107fde11-b55f-4c21-82db-f0fa2526c07c req-e06bc671-758d-4fca-bacd-8540769dbb6f service nova] Acquired lock "refresh_cache-580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 923.949934] env[68217]: DEBUG nova.network.neutron [req-107fde11-b55f-4c21-82db-f0fa2526c07c req-e06bc671-758d-4fca-bacd-8540769dbb6f service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Refreshing network info cache for port efb63bec-2d1f-41ad-b7bc-f9dc46cdd111 {{(pid=68217) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2064}} [ 923.953020] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6303948c-1b79-42cc-9663-3b0ff4723e17 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.969016] env[68217]: DEBUG nova.virt.hardware [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 923.969301] env[68217]: DEBUG nova.virt.hardware [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 923.969502] env[68217]: DEBUG nova.virt.hardware [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 923.969716] env[68217]: DEBUG nova.virt.hardware [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 923.969884] env[68217]: DEBUG nova.virt.hardware [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 923.970094] env[68217]: DEBUG nova.virt.hardware [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 923.970340] env[68217]: DEBUG nova.virt.hardware [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 923.970532] env[68217]: DEBUG nova.virt.hardware [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 
923.970727] env[68217]: DEBUG nova.virt.hardware [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 923.970914] env[68217]: DEBUG nova.virt.hardware [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 923.971149] env[68217]: DEBUG nova.virt.hardware [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 923.977663] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Reconfiguring VM to attach interface {{(pid=68217) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 923.981281] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb536637-fed9-4789-aa3e-e01c667807be {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.000600] env[68217]: DEBUG oslo_vmware.api [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 924.000600] env[68217]: value = "task-2961537" [ 924.000600] env[68217]: _type = "Task" [ 924.000600] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.012114] env[68217]: DEBUG oslo_vmware.api [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961537, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.102742] env[68217]: INFO nova.compute.manager [-] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Took 1.56 seconds to deallocate network for instance. [ 924.200647] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961536, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.242503] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cdfc36d-0997-4eb4-b785-e1d1cd5417f7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.251337] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46903aff-b7ba-4e01-969c-3283fba73dac {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.257997] env[68217]: DEBUG nova.compute.manager [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 924.298554] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d3a7fe-ab4f-48ef-9e16-5473a2df0961 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.303259] env[68217]: DEBUG nova.network.neutron [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Successfully created port: f195768f-440b-4ba7-b21f-04f548d7bc10 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 924.312100] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1652d6f8-6011-4fd6-8cab-fc209a41ab05 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.328710] env[68217]: DEBUG nova.compute.provider_tree [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 924.514307] env[68217]: DEBUG oslo_vmware.api [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961537, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.613934] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 924.703688] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961536, 'name': CreateVM_Task, 'duration_secs': 0.64296} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.703879] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 924.704708] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.704901] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 924.705227] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 924.705491] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-861d8fdc-ad29-4673-bb24-62992373f1e2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.710811] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 924.710811] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]528e1b05-1695-5336-ee58-cefef0d938b4" [ 924.710811] env[68217]: _type = "Task" [ 924.710811] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.719169] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528e1b05-1695-5336-ee58-cefef0d938b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.783683] env[68217]: DEBUG nova.network.neutron [req-107fde11-b55f-4c21-82db-f0fa2526c07c req-e06bc671-758d-4fca-bacd-8540769dbb6f service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Updated VIF entry in instance network info cache for port efb63bec-2d1f-41ad-b7bc-f9dc46cdd111. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 924.784652] env[68217]: DEBUG nova.network.neutron [req-107fde11-b55f-4c21-82db-f0fa2526c07c req-e06bc671-758d-4fca-bacd-8540769dbb6f service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Updating instance_info_cache with network_info: [{"id": "9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2", "address": "fa:16:3e:cd:e0:cd", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e834f9f-3d", "ovs_interfaceid": "9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "efb63bec-2d1f-41ad-b7bc-f9dc46cdd111", "address": "fa:16:3e:ab:4a:bd", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapefb63bec-2d", "ovs_interfaceid": "efb63bec-2d1f-41ad-b7bc-f9dc46cdd111", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 924.833083] env[68217]: DEBUG nova.scheduler.client.report [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) 
set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 924.839762] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 924.840064] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-83efce29-4763-4211-8ba3-2a885372aaf0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.848777] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 924.848777] env[68217]: value = "task-2961538" [ 924.848777] env[68217]: _type = "Task" [ 924.848777] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.858986] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961538, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.015490] env[68217]: DEBUG oslo_vmware.api [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961537, 'name': ReconfigVM_Task, 'duration_secs': 0.824622} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.016115] env[68217]: DEBUG oslo_concurrency.lockutils [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.016340] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Reconfigured VM to attach interface {{(pid=68217) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 925.222218] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528e1b05-1695-5336-ee58-cefef0d938b4, 'name': SearchDatastore_Task, 'duration_secs': 0.011932} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.222472] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.222714] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 925.222934] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.223092] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 925.223272] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 925.223537] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7ab98804-91c1-417e-a49b-504f97e18c67 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.237194] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 925.237354] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 925.238105] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a743d572-7009-4515-9ba0-a66df96b0148 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.244297] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 925.244297] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e23e90-3ba6-1870-b23a-1598408e0469" [ 925.244297] env[68217]: _type = "Task" [ 925.244297] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.252545] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e23e90-3ba6-1870-b23a-1598408e0469, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.267895] env[68217]: DEBUG nova.compute.manager [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 925.286394] env[68217]: DEBUG oslo_concurrency.lockutils [req-107fde11-b55f-4c21-82db-f0fa2526c07c req-e06bc671-758d-4fca-bacd-8540769dbb6f service nova] Releasing lock "refresh_cache-580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 925.286937] env[68217]: DEBUG nova.compute.manager [req-107fde11-b55f-4c21-82db-f0fa2526c07c req-e06bc671-758d-4fca-bacd-8540769dbb6f service nova] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Received event network-vif-deleted-1f77eb32-6eb4-42c4-8065-a7247f2c0c4a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 925.289195] env[68217]: DEBUG nova.virt.hardware [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 925.289853] env[68217]: DEBUG nova.virt.hardware [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 
tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 925.289853] env[68217]: DEBUG nova.virt.hardware [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 925.289853] env[68217]: DEBUG nova.virt.hardware [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 925.289989] env[68217]: DEBUG nova.virt.hardware [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 925.290190] env[68217]: DEBUG nova.virt.hardware [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 925.290236] env[68217]: DEBUG nova.virt.hardware [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 925.290374] env[68217]: DEBUG nova.virt.hardware [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 925.290540] env[68217]: DEBUG nova.virt.hardware [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 925.290701] env[68217]: DEBUG nova.virt.hardware [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 925.290874] env[68217]: DEBUG nova.virt.hardware [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 925.291730] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-328d9988-488c-4980-ac58-9b0b09e5d467 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.300337] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b2abdad-be99-4064-8ef7-0ab75062d501 {{(pid=68217) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.340846] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.106s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.343439] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.730s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 925.346037] env[68217]: INFO nova.compute.claims [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 925.358424] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961538, 'name': PowerOffVM_Task, 'duration_secs': 0.336339} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.358424] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 925.358556] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 925.359300] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-618fe4f9-25ad-4859-8465-6143920d6762 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.366062] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 925.367102] env[68217]: INFO nova.scheduler.client.report [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Deleted allocations for instance 650ebd16-da81-475e-a82a-7fa5fb2880bc [ 925.368400] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b9158731-63df-4a6b-8596-bdc495c6d181 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.394951] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None 
req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 925.394951] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 925.394951] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Deleting the datastore file [datastore2] 4366c94c-164d-4cb9-8f04-7f26db4c0d3c {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 925.395675] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-856d911a-48f9-4ba8-b042-044ebd849cca {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.402525] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 925.402525] env[68217]: value = "task-2961540" [ 925.402525] env[68217]: _type = "Task" [ 925.402525] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.420021] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961540, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.521678] env[68217]: DEBUG oslo_concurrency.lockutils [None req-809e0b75-8021-4afa-af9c-5e27354b98b4 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-580e6909-7d05-447a-a378-f0b8b71f059a-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.007s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.754740] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e23e90-3ba6-1870-b23a-1598408e0469, 'name': SearchDatastore_Task, 'duration_secs': 0.008955} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.755569] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03fe678d-3386-4dfa-b940-faba76a8e3b4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.764790] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 925.764790] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529bfac5-56fb-7c7d-1cc6-7ade27bc031a" [ 925.764790] env[68217]: _type = "Task" [ 925.764790] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 925.774193] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529bfac5-56fb-7c7d-1cc6-7ade27bc031a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.876600] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e8df2855-ad73-41f3-b42b-958ac570e352 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "650ebd16-da81-475e-a82a-7fa5fb2880bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.688s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 925.939170] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961540, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090877} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 925.939170] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 925.939170] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 925.939170] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 925.987754] env[68217]: DEBUG nova.network.neutron [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Successfully updated port: f195768f-440b-4ba7-b21f-04f548d7bc10 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 926.189559] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquiring lock "1f99ace3-1c5b-46ce-bb9c-74e139519da7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.190110] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "1f99ace3-1c5b-46ce-bb9c-74e139519da7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.277775] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529bfac5-56fb-7c7d-1cc6-7ade27bc031a, 'name': SearchDatastore_Task, 'duration_secs': 0.015953} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.279007] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 926.279007] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 213b720b-b782-41c4-b60d-ef0af4b62932/213b720b-b782-41c4-b60d-ef0af4b62932.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 926.279007] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4b0cd53d-49dc-4e8d-aa91-1abeed76a04d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.285656] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 926.285656] env[68217]: value = "task-2961541" [ 926.285656] env[68217]: _type = "Task" [ 926.285656] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.296823] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961541, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.490126] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "refresh_cache-fab7d1eb-ef05-4498-aa6d-a524c3bb59c8" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.490354] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "refresh_cache-fab7d1eb-ef05-4498-aa6d-a524c3bb59c8" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 926.490424] env[68217]: DEBUG nova.network.neutron [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 926.528806] env[68217]: DEBUG nova.compute.manager [req-0c3b91be-fed0-4cbc-8ea2-4f92790dc8ab req-9d3ad12d-05f8-476e-8772-787d41a9efa1 service nova] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Received event network-vif-plugged-f195768f-440b-4ba7-b21f-04f548d7bc10 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 926.529202] env[68217]: DEBUG oslo_concurrency.lockutils [req-0c3b91be-fed0-4cbc-8ea2-4f92790dc8ab req-9d3ad12d-05f8-476e-8772-787d41a9efa1 service nova] Acquiring lock "fab7d1eb-ef05-4498-aa6d-a524c3bb59c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 926.530199] env[68217]: DEBUG oslo_concurrency.lockutils [req-0c3b91be-fed0-4cbc-8ea2-4f92790dc8ab req-9d3ad12d-05f8-476e-8772-787d41a9efa1 service nova] Lock "fab7d1eb-ef05-4498-aa6d-a524c3bb59c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 926.530199] env[68217]: DEBUG oslo_concurrency.lockutils [req-0c3b91be-fed0-4cbc-8ea2-4f92790dc8ab req-9d3ad12d-05f8-476e-8772-787d41a9efa1 service nova] Lock "fab7d1eb-ef05-4498-aa6d-a524c3bb59c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 926.530199] env[68217]: DEBUG nova.compute.manager [req-0c3b91be-fed0-4cbc-8ea2-4f92790dc8ab req-9d3ad12d-05f8-476e-8772-787d41a9efa1 service nova] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] No waiting events found dispatching network-vif-plugged-f195768f-440b-4ba7-b21f-04f548d7bc10 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 926.530199] env[68217]: WARNING nova.compute.manager [req-0c3b91be-fed0-4cbc-8ea2-4f92790dc8ab req-9d3ad12d-05f8-476e-8772-787d41a9efa1 service nova] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Received unexpected event network-vif-plugged-f195768f-440b-4ba7-b21f-04f548d7bc10 for instance with vm_state building and task_state spawning. 
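The repeated "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns" lines in this trace are emitted by oslo.concurrency's lockutils wrappers, and the "Waiting for the task ... / _poll_task ... progress is N% / ... completed successfully" lines by oslo.vmware's task-wait helper. A minimal sketch of those two patterns follows; the endpoint, credentials, lock name, and vm_ref below are placeholders for illustration (not taken from this log), and actually running it would require a reachable vCenter.

    from oslo_concurrency import lockutils
    from oslo_vmware import api

    # Entering and leaving this decorator is what produces the
    # "Acquiring lock ...", "Lock ... acquired ... waited Ns" and
    # "Lock ... released ... held Ns" DEBUG lines from lockutils.py.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass  # resource-tracker bookkeeping would run here

    def power_off(session, vm_ref):
        # vm_ref is a hypothetical VirtualMachine managed-object
        # reference obtained elsewhere; invoke_api() calls the named
        # vSphere method through the session's vim client.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # wait_for_task() polls the task, emitting the
        # "_poll_task ... progress is N%" / "completed successfully"
        # DEBUG lines, and returns the task result.
        return session.wait_for_task(task)

    # Placeholder connection details -- not taken from this log.
    session = api.VMwareAPISession(
        'vcenter.example.com', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)
    claim_resources()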
[ 926.694636] env[68217]: DEBUG nova.compute.manager [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 926.774668] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d9789a7-73b3-475a-bf67-8ffc84fb4122 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.783476] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc7a54d-7b16-4057-9245-fda36a581b87 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.797085] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961541, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.475732} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 926.825387] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 213b720b-b782-41c4-b60d-ef0af4b62932/213b720b-b782-41c4-b60d-ef0af4b62932.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 926.825387] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 926.828122] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ad43522d-68e4-49d2-86ef-58ca771fe217 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.829518] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d96a442-26f2-4c23-94d9-cab1a66eee65 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.842772] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ed1210-7672-4372-b980-2b8d78abcbb8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.845873] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 926.845873] env[68217]: value = "task-2961542" [ 926.845873] env[68217]: _type = "Task" [ 926.845873] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.861633] env[68217]: DEBUG nova.compute.provider_tree [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 926.868882] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961542, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.983866] env[68217]: DEBUG nova.virt.hardware [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 926.984179] env[68217]: DEBUG nova.virt.hardware [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 926.984357] env[68217]: DEBUG nova.virt.hardware [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 926.984636] env[68217]: DEBUG nova.virt.hardware [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 926.984785] env[68217]: DEBUG nova.virt.hardware [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 926.984968] env[68217]: DEBUG nova.virt.hardware [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 926.985209] env[68217]: DEBUG nova.virt.hardware [None 
req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 926.985369] env[68217]: DEBUG nova.virt.hardware [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 926.985534] env[68217]: DEBUG nova.virt.hardware [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 926.985698] env[68217]: DEBUG nova.virt.hardware [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 926.985867] env[68217]: DEBUG nova.virt.hardware [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 926.986734] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46b76533-11d1-40a0-8b76-dee9c80d4c44 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.996789] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bef3ff9-12c8-42b7-9883-99a3d926a733 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.011554] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Instance VIF info [] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 927.017642] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 927.020642] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 927.021758] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab905413-2ed4-4ac0-b245-8c7fc23fad3b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.036380] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Acquiring lock "d0d8ed27-003e-43e2-8a07-041420a2c758" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.036627] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Lock "d0d8ed27-003e-43e2-8a07-041420a2c758" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.038085] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Acquiring lock "d0d8ed27-003e-43e2-8a07-041420a2c758-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.038085] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Lock "d0d8ed27-003e-43e2-8a07-041420a2c758-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.038085] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Lock "d0d8ed27-003e-43e2-8a07-041420a2c758-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.039592] env[68217]: INFO nova.compute.manager [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Terminating instance [ 927.047954] env[68217]: DEBUG nova.network.neutron [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 927.052170] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 927.052170] env[68217]: value = "task-2961543" [ 927.052170] env[68217]: _type = "Task" [ 927.052170] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.060943] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961543, 'name': CreateVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.216994] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.336037] env[68217]: DEBUG nova.network.neutron [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Updating instance_info_cache with network_info: [{"id": "f195768f-440b-4ba7-b21f-04f548d7bc10", "address": "fa:16:3e:de:0f:f3", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf195768f-44", "ovs_interfaceid": "f195768f-440b-4ba7-b21f-04f548d7bc10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.337513] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "d3468ec2-6548-400a-b247-a6ab1156cab5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.337740] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "d3468ec2-6548-400a-b247-a6ab1156cab5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.337937] env[68217]: DEBUG oslo_concurrency.lockutils [None 
req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "d3468ec2-6548-400a-b247-a6ab1156cab5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 927.338533] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "d3468ec2-6548-400a-b247-a6ab1156cab5-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.338718] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "d3468ec2-6548-400a-b247-a6ab1156cab5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.340976] env[68217]: INFO nova.compute.manager [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Terminating instance [ 927.357642] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961542, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06374} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.357901] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 927.358669] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e94948fe-9680-47d6-9559-f6e33db74a5b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.372802] env[68217]: DEBUG nova.scheduler.client.report [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 927.385436] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Reconfiguring VM instance instance-00000052 to attach disk [datastore1] 213b720b-b782-41c4-b60d-ef0af4b62932/213b720b-b782-41c4-b60d-ef0af4b62932.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 927.385942] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a43b9a8c-59a8-401b-8e99-1691936f8804 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.408916] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 927.408916] env[68217]: value = "task-2961544" [ 927.408916] env[68217]: _type = "Task" [ 927.408916] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.420207] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961544, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.543117] env[68217]: DEBUG nova.compute.manager [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 927.543656] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 927.544261] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dfa2d6d-0deb-4fdc-be43-bdb5a39d3cab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.553904] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 927.557438] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-16452d74-1041-49ff-958d-da221bc3d887 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.564132] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961543, 'name': CreateVM_Task, 'duration_secs': 0.340438} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.565851] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 927.565851] env[68217]: DEBUG oslo_vmware.api [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Waiting for the task: (returnval){ [ 927.565851] env[68217]: value = "task-2961545" [ 927.565851] env[68217]: _type = "Task" [ 927.565851] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.566487] env[68217]: DEBUG oslo_concurrency.lockutils [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.566648] env[68217]: DEBUG oslo_concurrency.lockutils [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 927.567025] env[68217]: DEBUG oslo_concurrency.lockutils [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 927.567292] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-381f3891-544e-4817-8775-bbb30dbd21ba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.574461] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 927.574461] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5224e904-4035-4d12-222b-5b8a36c59077" [ 927.574461] env[68217]: _type = "Task" [ 927.574461] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.577859] env[68217]: DEBUG oslo_vmware.api [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': task-2961545, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.585541] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5224e904-4035-4d12-222b-5b8a36c59077, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.841524] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "refresh_cache-fab7d1eb-ef05-4498-aa6d-a524c3bb59c8" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 927.841864] env[68217]: DEBUG nova.compute.manager [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Instance network_info: |[{"id": "f195768f-440b-4ba7-b21f-04f548d7bc10", "address": "fa:16:3e:de:0f:f3", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf195768f-44", "ovs_interfaceid": "f195768f-440b-4ba7-b21f-04f548d7bc10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 927.842329] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:de:0f:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f195768f-440b-4ba7-b21f-04f548d7bc10', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 927.852106] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 927.852855] env[68217]: DEBUG nova.compute.manager [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 927.852912] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 927.853631] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 927.853888] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4365ec14-2842-4727-aec5-e5ad1ef4d037 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.856755] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2b2236fb-8c2f-4515-bc42-65dee37a72e7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.876585] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 927.876585] env[68217]: value = "task-2961546" [ 927.876585] env[68217]: _type = "Task" [ 927.876585] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.879603] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 927.882947] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df9e5ae0-5fc9-4bb9-92df-72e338f5bb1c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.886996] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.543s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 927.887587] env[68217]: DEBUG nova.compute.manager [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 927.896027] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 17.245s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 927.898452] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961546, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.898611] env[68217]: DEBUG oslo_vmware.api [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 927.898611] env[68217]: value = "task-2961547" [ 927.898611] env[68217]: _type = "Task" [ 927.898611] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.914386] env[68217]: DEBUG oslo_vmware.api [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961547, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.926585] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961544, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.022557] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "interface-580e6909-7d05-447a-a378-f0b8b71f059a-efb63bec-2d1f-41ad-b7bc-f9dc46cdd111" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.022898] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-580e6909-7d05-447a-a378-f0b8b71f059a-efb63bec-2d1f-41ad-b7bc-f9dc46cdd111" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.048534] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "4a555172-a2a3-410b-a0fe-38964cee9a22" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 928.048599] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "4a555172-a2a3-410b-a0fe-38964cee9a22" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 928.081915] env[68217]: DEBUG oslo_vmware.api [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': task-2961545, 'name': PowerOffVM_Task, 'duration_secs': 0.382795} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.085450] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 928.085656] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 928.085947] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-60671faa-0c7e-41b9-afdb-8491df24ca59 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.093809] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5224e904-4035-4d12-222b-5b8a36c59077, 'name': SearchDatastore_Task, 'duration_secs': 0.018292} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.094121] env[68217]: DEBUG oslo_concurrency.lockutils [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.094473] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 928.094650] env[68217]: DEBUG oslo_concurrency.lockutils [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.094821] env[68217]: DEBUG oslo_concurrency.lockutils [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.095036] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 928.095329] env[68217]: DEBUG oslo_vmware.service 
[-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a147fcab-e3f3-4d3b-b4fd-a15da38788f6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.103788] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 928.103988] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 928.104762] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30d14321-827d-4c03-82ad-7173f30e93b7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.110549] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 928.110549] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524428e9-e9be-599b-a706-a7624136dfe6" [ 928.110549] env[68217]: _type = "Task" [ 928.110549] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.120808] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524428e9-e9be-599b-a706-a7624136dfe6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.173149] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 928.173386] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 928.173566] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Deleting the datastore file [datastore1] d0d8ed27-003e-43e2-8a07-041420a2c758 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 928.173837] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-68762642-e5f5-46d5-8629-e2d45ec10ae8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.181030] env[68217]: DEBUG oslo_vmware.api [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Waiting for the task: (returnval){ [ 928.181030] env[68217]: value = "task-2961549" [ 928.181030] env[68217]: _type = "Task" [ 928.181030] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.190022] env[68217]: DEBUG oslo_vmware.api [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': task-2961549, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.388930] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961546, 'name': CreateVM_Task, 'duration_secs': 0.371021} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.389235] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 928.389806] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.389967] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.390288] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 928.390542] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1cf2901a-2699-4951-b3f1-2c57af3c1fc5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.395354] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 928.395354] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52780547-0659-7f27-350c-f8f320ac714c" [ 928.395354] env[68217]: _type = "Task" [ 928.395354] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.400361] env[68217]: DEBUG nova.objects.instance [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lazy-loading 'migration_context' on Instance uuid fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 928.402549] env[68217]: DEBUG nova.compute.utils [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 928.407059] env[68217]: DEBUG nova.compute.manager [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 928.407234] env[68217]: DEBUG nova.network.neutron [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 928.411829] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52780547-0659-7f27-350c-f8f320ac714c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.421460] env[68217]: DEBUG oslo_vmware.api [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961547, 'name': PowerOffVM_Task, 'duration_secs': 0.312808} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.421595] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 928.422143] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 928.422605] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0bfdc398-e5f9-47f0-bc64-219070532724 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.427069] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961544, 'name': ReconfigVM_Task, 'duration_secs': 0.76367} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.427753] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Reconfigured VM instance instance-00000052 to attach disk [datastore1] 213b720b-b782-41c4-b60d-ef0af4b62932/213b720b-b782-41c4-b60d-ef0af4b62932.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 928.428250] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3e0a038a-3a6f-4646-8f53-e873e44060ee {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.434334] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 928.434334] env[68217]: value = "task-2961551" [ 928.434334] env[68217]: _type = "Task" [ 928.434334] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.442148] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961551, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.450659] env[68217]: DEBUG nova.policy [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34286c10b8b242fb83eb4f1493b9477b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '90ad2b0a8a0743ca80a0685bf56e0446', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 928.495414] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 928.495638] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 928.495838] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Deleting the datastore file [datastore2] d3468ec2-6548-400a-b247-a6ab1156cab5 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 928.496144] env[68217]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4bd0e9a8-27c2-4537-87de-6cb5156ee140 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.505831] env[68217]: DEBUG oslo_vmware.api [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 928.505831] env[68217]: value = "task-2961552" [ 928.505831] env[68217]: _type = "Task" [ 928.505831] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.515815] env[68217]: DEBUG oslo_vmware.api [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961552, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.529018] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.529018] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.529018] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcee4298-8733-4a90-b7bc-d34e91cd1ff8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.558198] env[68217]: DEBUG nova.compute.manager [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 928.560677] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b108a2b-a4c8-4d49-9fd6-6457bb382af9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.594987] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Reconfiguring VM to detach interface {{(pid=68217) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 928.596891] env[68217]: DEBUG nova.compute.manager [req-f173405a-85e9-4206-aab7-69f1665ca1d1 req-f72d0268-3f18-4122-94cf-40fafcb4bcfd service nova] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Received event network-changed-f195768f-440b-4ba7-b21f-04f548d7bc10 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 928.597155] env[68217]: DEBUG nova.compute.manager [req-f173405a-85e9-4206-aab7-69f1665ca1d1 req-f72d0268-3f18-4122-94cf-40fafcb4bcfd service nova] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Refreshing instance network info cache due to event network-changed-f195768f-440b-4ba7-b21f-04f548d7bc10. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 928.597406] env[68217]: DEBUG oslo_concurrency.lockutils [req-f173405a-85e9-4206-aab7-69f1665ca1d1 req-f72d0268-3f18-4122-94cf-40fafcb4bcfd service nova] Acquiring lock "refresh_cache-fab7d1eb-ef05-4498-aa6d-a524c3bb59c8" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.597558] env[68217]: DEBUG oslo_concurrency.lockutils [req-f173405a-85e9-4206-aab7-69f1665ca1d1 req-f72d0268-3f18-4122-94cf-40fafcb4bcfd service nova] Acquired lock "refresh_cache-fab7d1eb-ef05-4498-aa6d-a524c3bb59c8" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 928.597744] env[68217]: DEBUG nova.network.neutron [req-f173405a-85e9-4206-aab7-69f1665ca1d1 req-f72d0268-3f18-4122-94cf-40fafcb4bcfd service nova] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Refreshing network info cache for port f195768f-440b-4ba7-b21f-04f548d7bc10 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 928.601047] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6281013b-dcc6-44aa-b382-8be84d5bd6f0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.630671] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524428e9-e9be-599b-a706-a7624136dfe6, 'name': SearchDatastore_Task, 'duration_secs': 0.009215} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.633429] env[68217]: DEBUG oslo_vmware.api [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 928.633429] env[68217]: value = "task-2961553" [ 928.633429] env[68217]: _type = "Task" [ 928.633429] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.634268] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5d1b8a2c-fdda-4306-8ed8-ff28723c1d20 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.644297] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 928.644297] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529be029-a299-6a05-5a12-3077a9c5e78c" [ 928.644297] env[68217]: _type = "Task" [ 928.644297] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.651970] env[68217]: DEBUG oslo_vmware.api [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961553, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.661472] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529be029-a299-6a05-5a12-3077a9c5e78c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.691054] env[68217]: DEBUG oslo_vmware.api [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Task: {'id': task-2961549, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158537} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.691321] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 928.691501] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 928.691675] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 928.691858] env[68217]: INFO nova.compute.manager [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Took 1.15 seconds to destroy the instance on the hypervisor. [ 928.692098] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 928.692308] env[68217]: DEBUG nova.compute.manager [-] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 928.692393] env[68217]: DEBUG nova.network.neutron [-] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 928.848244] env[68217]: DEBUG nova.network.neutron [req-f173405a-85e9-4206-aab7-69f1665ca1d1 req-f72d0268-3f18-4122-94cf-40fafcb4bcfd service nova] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Updated VIF entry in instance network info cache for port f195768f-440b-4ba7-b21f-04f548d7bc10. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 928.848626] env[68217]: DEBUG nova.network.neutron [req-f173405a-85e9-4206-aab7-69f1665ca1d1 req-f72d0268-3f18-4122-94cf-40fafcb4bcfd service nova] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Updating instance_info_cache with network_info: [{"id": "f195768f-440b-4ba7-b21f-04f548d7bc10", "address": "fa:16:3e:de:0f:f3", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf195768f-44", "ovs_interfaceid": "f195768f-440b-4ba7-b21f-04f548d7bc10", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.908493] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52780547-0659-7f27-350c-f8f320ac714c, 'name': SearchDatastore_Task, 'duration_secs': 0.008829} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.908925] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 928.909170] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 928.909377] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.912971] env[68217]: DEBUG nova.compute.manager [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 928.946786] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961551, 'name': Rename_Task, 'duration_secs': 0.302383} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.947312] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 928.947441] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f7a2d637-b4b6-495a-976f-2217b9104fa8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.956811] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 928.956811] env[68217]: value = "task-2961554" [ 928.956811] env[68217]: _type = "Task" [ 928.956811] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.964579] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961554, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.018190] env[68217]: DEBUG oslo_vmware.api [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961552, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171897} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.018190] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 929.018190] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 929.021562] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 929.021562] env[68217]: INFO nova.compute.manager [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Took 1.17 seconds to destroy the instance on the hypervisor. [ 929.021562] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 929.021562] env[68217]: DEBUG nova.compute.manager [-] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 929.021562] env[68217]: DEBUG nova.network.neutron [-] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 929.089056] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 929.151580] env[68217]: DEBUG oslo_vmware.api [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961553, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.156505] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529be029-a299-6a05-5a12-3077a9c5e78c, 'name': SearchDatastore_Task, 'duration_secs': 0.02402} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.156749] env[68217]: DEBUG oslo_concurrency.lockutils [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.157026] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 4366c94c-164d-4cb9-8f04-7f26db4c0d3c/4366c94c-164d-4cb9-8f04-7f26db4c0d3c.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 929.157296] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 929.157514] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 929.157743] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e07fd50-b516-4ac3-82af-11aa9b2175a5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.161755] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89ceaee1-8f92-471a-9ce0-ff83ec3ea108 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.169592] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 929.169592] env[68217]: value = "task-2961555" [ 929.169592] env[68217]: _type = "Task" [ 929.169592] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.174017] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 929.174207] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 929.177543] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fa750e38-6937-4a73-ab56-a24f7e1375c2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.183282] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961555, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.186717] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 929.186717] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524744bb-9c71-8faa-7459-9be6468a038f" [ 929.186717] env[68217]: _type = "Task" [ 929.186717] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.197188] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524744bb-9c71-8faa-7459-9be6468a038f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.352825] env[68217]: DEBUG oslo_concurrency.lockutils [req-f173405a-85e9-4206-aab7-69f1665ca1d1 req-f72d0268-3f18-4122-94cf-40fafcb4bcfd service nova] Releasing lock "refresh_cache-fab7d1eb-ef05-4498-aa6d-a524c3bb59c8" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 929.355802] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0711c187-1d5b-475b-b154-11f61ee6e60c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.363858] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60a36e4e-4eda-4d87-b027-263bcaa8eb02 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.396118] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3a53b65-b9cc-4203-86b9-47a36f39c8af {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.404304] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c43e101c-da56-4b4b-abcc-ea318e60ed81 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.424904] env[68217]: DEBUG nova.compute.provider_tree [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.468736] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961554, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.523214] env[68217]: DEBUG nova.network.neutron [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Successfully created port: 772726db-4382-4051-9a7d-abfc670d5c9b {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 929.551354] env[68217]: DEBUG nova.compute.manager [req-d0dba58b-501e-451e-a35f-3195fc9d12ae req-81d0e410-c6ff-4ee0-9679-9bb8f8a45a96 service nova] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Received event network-vif-deleted-2edb7766-2fb2-49a8-8100-5abcb17581eb {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 929.551575] env[68217]: INFO nova.compute.manager [req-d0dba58b-501e-451e-a35f-3195fc9d12ae req-81d0e410-c6ff-4ee0-9679-9bb8f8a45a96 service nova] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Neutron deleted interface 2edb7766-2fb2-49a8-8100-5abcb17581eb; detaching it from the instance and deleting it from the info cache [ 929.551835] env[68217]: DEBUG nova.network.neutron [req-d0dba58b-501e-451e-a35f-3195fc9d12ae req-81d0e410-c6ff-4ee0-9679-9bb8f8a45a96 service nova] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.649767] env[68217]: DEBUG oslo_vmware.api [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961553, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.682447] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961555, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.699040] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524744bb-9c71-8faa-7459-9be6468a038f, 'name': SearchDatastore_Task, 'duration_secs': 0.011797} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.699861] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e218f9af-3db7-4035-970f-f7047d79e1a3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.708507] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 929.708507] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ab41d8-034a-bc88-23e6-0477343a4826" [ 929.708507] env[68217]: _type = "Task" [ 929.708507] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.718810] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ab41d8-034a-bc88-23e6-0477343a4826, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.770566] env[68217]: DEBUG nova.network.neutron [-] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.930666] env[68217]: DEBUG nova.compute.manager [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 929.932623] env[68217]: DEBUG nova.scheduler.client.report [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 929.971887] env[68217]: DEBUG oslo_vmware.api [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961554, 'name': PowerOnVM_Task, 'duration_secs': 0.620656} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.975292] env[68217]: DEBUG nova.virt.hardware [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 929.975680] env[68217]: DEBUG nova.virt.hardware [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 929.975983] env[68217]: DEBUG nova.virt.hardware [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 929.976454] env[68217]: DEBUG nova.virt.hardware [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 929.976727] env[68217]: DEBUG nova.virt.hardware [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 929.976980] env[68217]: DEBUG nova.virt.hardware [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 929.977351] env[68217]: DEBUG nova.virt.hardware [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 929.978558] env[68217]: DEBUG nova.virt.hardware [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 929.981029] env[68217]: 
DEBUG nova.virt.hardware [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 929.981029] env[68217]: DEBUG nova.virt.hardware [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 929.981029] env[68217]: DEBUG nova.virt.hardware [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 929.981029] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 929.981029] env[68217]: INFO nova.compute.manager [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Took 10.25 seconds to spawn the instance on the hypervisor. [ 929.981029] env[68217]: DEBUG nova.compute.manager [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 929.981029] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1b8581-1f0a-4fb2-bb3f-6a7cc777fd73 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.986703] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-207f5e1e-972e-4fef-a7ec-744eb047b9f8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.996876] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a85f1238-2a20-40d8-a018-b3255e651354 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.054844] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4a0e8ab5-513c-4d70-881b-0ffa6fc2649e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.064513] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-add04b17-bf4d-4d56-bd33-169145778c51 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.107758] env[68217]: DEBUG nova.compute.manager [req-d0dba58b-501e-451e-a35f-3195fc9d12ae req-81d0e410-c6ff-4ee0-9679-9bb8f8a45a96 service nova] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] 
Detach interface failed, port_id=2edb7766-2fb2-49a8-8100-5abcb17581eb, reason: Instance d3468ec2-6548-400a-b247-a6ab1156cab5 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 930.151354] env[68217]: DEBUG oslo_vmware.api [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961553, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.180066] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961555, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.61407} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.180255] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 4366c94c-164d-4cb9-8f04-7f26db4c0d3c/4366c94c-164d-4cb9-8f04-7f26db4c0d3c.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 930.180471] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 930.180794] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1512fae-22d7-47ba-885f-211aab4c1f57 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.187308] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 930.187308] env[68217]: value = "task-2961556" [ 930.187308] env[68217]: _type = "Task" [ 930.187308] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.196379] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961556, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.219948] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ab41d8-034a-bc88-23e6-0477343a4826, 'name': SearchDatastore_Task, 'duration_secs': 0.053528} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.220334] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 930.220656] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] fab7d1eb-ef05-4498-aa6d-a524c3bb59c8/fab7d1eb-ef05-4498-aa6d-a524c3bb59c8.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 930.220979] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47f9b350-84fb-45cf-9bea-46596a8b1510 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.228164] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 930.228164] env[68217]: value = "task-2961557" [ 930.228164] env[68217]: _type = "Task" [ 930.228164] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.236743] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961557, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.275121] env[68217]: INFO nova.compute.manager [-] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Took 1.26 seconds to deallocate network for instance. [ 930.512545] env[68217]: INFO nova.compute.manager [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Took 27.08 seconds to build instance. [ 930.630035] env[68217]: DEBUG nova.network.neutron [-] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.665640] env[68217]: DEBUG oslo_vmware.api [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961553, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.675386] env[68217]: DEBUG nova.compute.manager [req-0c9e0663-31e7-4a4f-99ae-0d049c9a38af req-16ddc633-f632-4e5b-a4b0-00a1c5bf118b service nova] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Received event network-vif-deleted-bde1de37-ba7a-4f49-94b6-85acc11e39a6 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 930.697894] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961556, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.244809} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.697894] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 930.698702] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc0764ce-b629-4b53-bd78-93185dd31d70 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.719101] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 4366c94c-164d-4cb9-8f04-7f26db4c0d3c/4366c94c-164d-4cb9-8f04-7f26db4c0d3c.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 930.719914] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3570c987-df5d-4d49-b9c1-9580d4fac513 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.745174] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 930.745174] env[68217]: value = "task-2961558" [ 930.745174] env[68217]: _type = "Task" [ 930.745174] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.748442] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961557, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.781976] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 930.946119] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 3.050s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 930.953460] env[68217]: DEBUG oslo_concurrency.lockutils [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 18.183s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 930.953739] env[68217]: DEBUG nova.objects.instance [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lazy-loading 'resources' on Instance uuid 58c15727-79ae-404f-a054-d71e3be498cc {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 931.015381] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a5780bbf-1117-48e5-885a-3091134e2679 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "213b720b-b782-41c4-b60d-ef0af4b62932" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.604s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 931.133732] env[68217]: INFO nova.compute.manager [-] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Took 2.44 seconds to deallocate network for instance. [ 931.154688] env[68217]: DEBUG oslo_vmware.api [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961553, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.243533] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1be01a4a-1098-416b-af20-cbd94c0f6a94 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "213b720b-b782-41c4-b60d-ef0af4b62932" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.243533] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1be01a4a-1098-416b-af20-cbd94c0f6a94 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "213b720b-b782-41c4-b60d-ef0af4b62932" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 931.243533] env[68217]: DEBUG nova.compute.manager [None req-1be01a4a-1098-416b-af20-cbd94c0f6a94 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 931.243533] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b07d98a-b1d6-43a1-8c41-1e352981a30c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.249767] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961557, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.982787} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.253939] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] fab7d1eb-ef05-4498-aa6d-a524c3bb59c8/fab7d1eb-ef05-4498-aa6d-a524c3bb59c8.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 931.253939] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 931.255102] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e0c5931b-975b-4d56-942e-c9a3a798448f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.256989] env[68217]: DEBUG nova.compute.manager [None req-1be01a4a-1098-416b-af20-cbd94c0f6a94 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68217) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 931.257768] env[68217]: DEBUG nova.objects.instance [None req-1be01a4a-1098-416b-af20-cbd94c0f6a94 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lazy-loading 'flavor' on Instance uuid 213b720b-b782-41c4-b60d-ef0af4b62932 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 931.268710] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961558, 'name': ReconfigVM_Task, 'duration_secs': 0.477699} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.268710] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 4366c94c-164d-4cb9-8f04-7f26db4c0d3c/4366c94c-164d-4cb9-8f04-7f26db4c0d3c.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 931.268710] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 931.268710] env[68217]: value = "task-2961559" [ 931.268710] env[68217]: _type = "Task" [ 931.268710] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.268710] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-569d71fd-3bf2-41d9-ae14-bd6f8ba94b61 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.278591] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961559, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.280593] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 931.280593] env[68217]: value = "task-2961560" [ 931.280593] env[68217]: _type = "Task" [ 931.280593] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.292832] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961560, 'name': Rename_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.559048] env[68217]: DEBUG nova.network.neutron [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Successfully updated port: 772726db-4382-4051-9a7d-abfc670d5c9b {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 931.641068] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 931.653878] env[68217]: DEBUG oslo_vmware.api [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961553, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.783664] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961559, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076805} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.786275] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 931.789371] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d17a4c-1015-4a1a-8fbd-75af13135d4e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.812309] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Reconfiguring VM instance instance-00000053 to attach disk [datastore2] fab7d1eb-ef05-4498-aa6d-a524c3bb59c8/fab7d1eb-ef05-4498-aa6d-a524c3bb59c8.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 931.817386] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6dc6436b-5d72-426b-9251-9be5be627b77 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.831202] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961560, 'name': Rename_Task, 'duration_secs': 0.132117} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.834180] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27f1c27c-7148-49cd-b5d8-9050d72d4985 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.837466] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 931.838057] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-96abfbdd-7f38-4b74-8359-caa1c1bee9a4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.845289] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0e7b20d-9769-4085-8a96-58ee298d5a03 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.848645] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 931.848645] env[68217]: value = "task-2961561" [ 931.848645] env[68217]: _type = "Task" [ 931.848645] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.850011] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 931.850011] env[68217]: value = "task-2961562" [ 931.850011] env[68217]: _type = "Task" [ 931.850011] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.883634] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ec31e0-fe4b-4ed3-820d-7915e8608172 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.892733] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961561, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.893260] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961562, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.897975] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41918096-9574-4b11-acb7-1af1f392c8c9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.911724] env[68217]: DEBUG nova.compute.provider_tree [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 932.061374] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.061575] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 932.061748] env[68217]: DEBUG nova.network.neutron [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 932.153094] env[68217]: DEBUG oslo_vmware.api [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961553, 
'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.278058] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1be01a4a-1098-416b-af20-cbd94c0f6a94 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 932.278196] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6e265c20-5216-4caa-b23d-a9b1705e5c73 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.286398] env[68217]: DEBUG oslo_vmware.api [None req-1be01a4a-1098-416b-af20-cbd94c0f6a94 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 932.286398] env[68217]: value = "task-2961563" [ 932.286398] env[68217]: _type = "Task" [ 932.286398] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.294857] env[68217]: DEBUG oslo_vmware.api [None req-1be01a4a-1098-416b-af20-cbd94c0f6a94 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961563, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.338199] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Acquiring lock "0552d616-a406-4dfa-8a70-82f39fb98bbc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 932.338544] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Lock "0552d616-a406-4dfa-8a70-82f39fb98bbc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.338850] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Acquiring lock "0552d616-a406-4dfa-8a70-82f39fb98bbc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 932.339084] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Lock "0552d616-a406-4dfa-8a70-82f39fb98bbc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.339252] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1aea037-6f59-4d2b-891a-c24d934281cb 
tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Lock "0552d616-a406-4dfa-8a70-82f39fb98bbc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.341377] env[68217]: INFO nova.compute.manager [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Terminating instance [ 932.362011] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961561, 'name': ReconfigVM_Task, 'duration_secs': 0.278886} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.365052] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Reconfigured VM instance instance-00000053 to attach disk [datastore2] fab7d1eb-ef05-4498-aa6d-a524c3bb59c8/fab7d1eb-ef05-4498-aa6d-a524c3bb59c8.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 932.365706] env[68217]: DEBUG oslo_vmware.api [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961562, 'name': PowerOnVM_Task, 'duration_secs': 0.453857} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.365924] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f7333d6-76d1-4022-9fc9-49bc03a27a7f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.367541] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 932.367757] env[68217]: DEBUG nova.compute.manager [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 932.368812] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e40bf7-d1b8-4cc9-8604-83a6d310da7f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.379523] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 932.379523] env[68217]: value = "task-2961564" [ 932.379523] env[68217]: _type = "Task" [ 932.379523] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.387932] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961564, 'name': Rename_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.415629] env[68217]: DEBUG nova.scheduler.client.report [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 932.487819] env[68217]: INFO nova.compute.manager [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Swapping old allocation on dict_keys(['42aedcce-ee61-45e1-bf10-c06056d1f548']) held by migration 9b4da72d-7edd-4b1b-af49-bccd923d81bd for instance [ 932.516374] env[68217]: DEBUG nova.scheduler.client.report [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Overwriting current allocation {'allocations': {'42aedcce-ee61-45e1-bf10-c06056d1f548': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 113}}, 'project_id': '726c3dbb291b49b39db3ef87e35cdfbd', 'user_id': '10c1ddac3d4946f88e9762a2bea8cfa9', 'consumer_generation': 1} on consumer fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb {{(pid=68217) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 932.614574] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "refresh_cache-fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.614574] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquired lock "refresh_cache-fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 932.614574] env[68217]: DEBUG nova.network.neutron [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 932.617162] env[68217]: DEBUG nova.network.neutron [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 
04149a5c-d1b5-4d71-a1ca-44696506a40d] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 932.652394] env[68217]: DEBUG oslo_vmware.api [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961553, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.704165] env[68217]: DEBUG nova.compute.manager [req-0c4e241f-e450-44c4-822c-36582d52eb4c req-a0334439-0c4d-4c7d-b584-6b82c8bc0472 service nova] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Received event network-vif-plugged-772726db-4382-4051-9a7d-abfc670d5c9b {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 932.704381] env[68217]: DEBUG oslo_concurrency.lockutils [req-0c4e241f-e450-44c4-822c-36582d52eb4c req-a0334439-0c4d-4c7d-b584-6b82c8bc0472 service nova] Acquiring lock "04149a5c-d1b5-4d71-a1ca-44696506a40d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 932.704605] env[68217]: DEBUG oslo_concurrency.lockutils [req-0c4e241f-e450-44c4-822c-36582d52eb4c req-a0334439-0c4d-4c7d-b584-6b82c8bc0472 service nova] Lock "04149a5c-d1b5-4d71-a1ca-44696506a40d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.704744] env[68217]: DEBUG oslo_concurrency.lockutils [req-0c4e241f-e450-44c4-822c-36582d52eb4c req-a0334439-0c4d-4c7d-b584-6b82c8bc0472 service nova] Lock "04149a5c-d1b5-4d71-a1ca-44696506a40d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.704945] env[68217]: DEBUG nova.compute.manager [req-0c4e241f-e450-44c4-822c-36582d52eb4c req-a0334439-0c4d-4c7d-b584-6b82c8bc0472 service nova] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] No waiting events found dispatching network-vif-plugged-772726db-4382-4051-9a7d-abfc670d5c9b {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 932.705293] env[68217]: WARNING nova.compute.manager [req-0c4e241f-e450-44c4-822c-36582d52eb4c req-a0334439-0c4d-4c7d-b584-6b82c8bc0472 service nova] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Received unexpected event network-vif-plugged-772726db-4382-4051-9a7d-abfc670d5c9b for instance with vm_state building and task_state spawning. [ 932.705467] env[68217]: DEBUG nova.compute.manager [req-0c4e241f-e450-44c4-822c-36582d52eb4c req-a0334439-0c4d-4c7d-b584-6b82c8bc0472 service nova] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Received event network-changed-772726db-4382-4051-9a7d-abfc670d5c9b {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 932.705619] env[68217]: DEBUG nova.compute.manager [req-0c4e241f-e450-44c4-822c-36582d52eb4c req-a0334439-0c4d-4c7d-b584-6b82c8bc0472 service nova] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Refreshing instance network info cache due to event network-changed-772726db-4382-4051-9a7d-abfc670d5c9b. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 932.705784] env[68217]: DEBUG oslo_concurrency.lockutils [req-0c4e241f-e450-44c4-822c-36582d52eb4c req-a0334439-0c4d-4c7d-b584-6b82c8bc0472 service nova] Acquiring lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.750442] env[68217]: DEBUG nova.network.neutron [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updating instance_info_cache with network_info: [{"id": "772726db-4382-4051-9a7d-abfc670d5c9b", "address": "fa:16:3e:49:ce:f8", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap772726db-43", "ovs_interfaceid": "772726db-4382-4051-9a7d-abfc670d5c9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.796924] env[68217]: DEBUG oslo_vmware.api [None req-1be01a4a-1098-416b-af20-cbd94c0f6a94 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961563, 'name': PowerOffVM_Task, 'duration_secs': 0.388224} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.798035] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1be01a4a-1098-416b-af20-cbd94c0f6a94 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 932.798035] env[68217]: DEBUG nova.compute.manager [None req-1be01a4a-1098-416b-af20-cbd94c0f6a94 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 932.798179] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-419a1b64-4022-4d23-9045-70158eedcacb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.845064] env[68217]: DEBUG nova.compute.manager [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 932.845064] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 932.845343] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-474cb345-cbf8-4a3b-8187-dbaad7318a63 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.852303] env[68217]: DEBUG oslo_vmware.api [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Waiting for the task: (returnval){ [ 932.852303] env[68217]: value = "task-2961565" [ 932.852303] env[68217]: _type = "Task" [ 932.852303] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.860540] env[68217]: DEBUG oslo_vmware.api [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Task: {'id': task-2961565, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.889120] env[68217]: DEBUG oslo_concurrency.lockutils [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 932.893341] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961564, 'name': Rename_Task, 'duration_secs': 0.141266} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.893592] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 932.893823] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-876b7004-f6d9-4eaf-8137-35f84336c0e4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.900579] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 932.900579] env[68217]: value = "task-2961566" [ 932.900579] env[68217]: _type = "Task" [ 932.900579] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.907958] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961566, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.922857] env[68217]: DEBUG oslo_concurrency.lockutils [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.969s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 932.925253] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 16.716s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 932.925449] env[68217]: DEBUG nova.objects.instance [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68217) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 932.944417] env[68217]: INFO nova.scheduler.client.report [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Deleted allocations for instance 58c15727-79ae-404f-a054-d71e3be498cc [ 933.153880] env[68217]: DEBUG oslo_vmware.api [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961553, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.253298] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 933.253622] env[68217]: DEBUG nova.compute.manager [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Instance network_info: |[{"id": "772726db-4382-4051-9a7d-abfc670d5c9b", "address": "fa:16:3e:49:ce:f8", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap772726db-43", "ovs_interfaceid": "772726db-4382-4051-9a7d-abfc670d5c9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 933.253916] env[68217]: DEBUG oslo_concurrency.lockutils [req-0c4e241f-e450-44c4-822c-36582d52eb4c req-a0334439-0c4d-4c7d-b584-6b82c8bc0472 service nova] Acquired lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.254104] env[68217]: DEBUG nova.network.neutron [req-0c4e241f-e450-44c4-822c-36582d52eb4c req-a0334439-0c4d-4c7d-b584-6b82c8bc0472 service nova] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Refreshing network info cache for port 772726db-4382-4051-9a7d-abfc670d5c9b {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 933.255334] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:49:ce:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '772726db-4382-4051-9a7d-abfc670d5c9b', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 933.268325] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 
tempest-ServerActionsTestOtherB-1476378927-project-member] Creating folder: Project (90ad2b0a8a0743ca80a0685bf56e0446). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 933.272295] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ff403b1c-76b5-4191-9485-3aab9ff7af9e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.284436] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Created folder: Project (90ad2b0a8a0743ca80a0685bf56e0446) in parent group-v594094. [ 933.284636] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Creating folder: Instances. Parent ref: group-v594325. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 933.284914] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5a424b15-10a1-41f2-940f-30727029af6a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.295698] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Created folder: Instances in parent group-v594325. [ 933.296342] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 933.296713] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 933.296958] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-71da75c4-8725-4e8c-8c6c-0835ea4f905e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.320850] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1be01a4a-1098-416b-af20-cbd94c0f6a94 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "213b720b-b782-41c4-b60d-ef0af4b62932" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.079s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.326816] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 933.326816] env[68217]: value = "task-2961569" [ 933.326816] env[68217]: _type = "Task" [ 933.326816] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.339203] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961569, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.363916] env[68217]: DEBUG oslo_vmware.api [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Task: {'id': task-2961565, 'name': PowerOffVM_Task, 'duration_secs': 0.276215} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.364287] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 933.364540] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Volume detach. Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 933.364841] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594156', 'volume_id': '03332631-865e-4746-a213-a86fd1f1f4ef', 'name': 'volume-03332631-865e-4746-a213-a86fd1f1f4ef', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0552d616-a406-4dfa-8a70-82f39fb98bbc', 'attached_at': '', 'detached_at': '', 'volume_id': '03332631-865e-4746-a213-a86fd1f1f4ef', 'serial': '03332631-865e-4746-a213-a86fd1f1f4ef'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 933.365783] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1551344d-74a4-4643-8d0c-cd3bc956a356 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.389590] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7787711-bc02-413a-a4fa-530210ed2390 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.397986] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85eb778f-fd10-4716-aa34-8f823ea60cc1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.410605] env[68217]: DEBUG oslo_vmware.api [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961566, 'name': PowerOnVM_Task, 'duration_secs': 0.45561} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.424541] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 933.424953] env[68217]: INFO nova.compute.manager [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Took 8.16 seconds to spawn the instance on the hypervisor. [ 933.425255] env[68217]: DEBUG nova.compute.manager [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 933.426525] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b753b1b4-7cdb-44c2-97cf-592f8e6fab20 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.429785] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b8e3394-f422-430b-85df-365b00b81cd7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.459446] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] The volume has not been displaced from its original location: [datastore1] volume-03332631-865e-4746-a213-a86fd1f1f4ef/volume-03332631-865e-4746-a213-a86fd1f1f4ef.vmdk. No consolidation needed. 
{{(pid=68217) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 933.466658] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Reconfiguring VM instance instance-0000002c to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 933.467470] env[68217]: DEBUG oslo_concurrency.lockutils [None req-072cbf12-924a-4db5-8fac-f286658ea849 tempest-ServersNegativeTestJSON-1692394979 tempest-ServersNegativeTestJSON-1692394979-project-member] Lock "58c15727-79ae-404f-a054-d71e3be498cc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 24.662s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.468633] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d68b9feb-c0d2-44a6-be32-a08e28afd990 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.490847] env[68217]: DEBUG oslo_vmware.api [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Waiting for the task: (returnval){ [ 933.490847] env[68217]: value = "task-2961570" [ 933.490847] env[68217]: _type = "Task" [ 933.490847] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.499654] env[68217]: DEBUG oslo_vmware.api [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Task: {'id': task-2961570, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.501078] env[68217]: DEBUG nova.network.neutron [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Updating instance_info_cache with network_info: [{"id": "747300c0-a758-483f-ba39-99efe6e731ec", "address": "fa:16:3e:2a:04:4a", "network": {"id": "39b87cd2-8e7d-4a27-b79f-9bc115810157", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.198", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "afb0cd3e48d6419f875a94a0a1856550", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a7188cb6-c9d7-449a-9362-ddb61a31feaf", "external-id": "nsx-vlan-transportzone-99", "segmentation_id": 99, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap747300c0-a7", "ovs_interfaceid": "747300c0-a758-483f-ba39-99efe6e731ec", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.655313] env[68217]: DEBUG oslo_vmware.api [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961553, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.741626] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "213b720b-b782-41c4-b60d-ef0af4b62932" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.741875] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "213b720b-b782-41c4-b60d-ef0af4b62932" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.742091] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "213b720b-b782-41c4-b60d-ef0af4b62932-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 933.742278] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "213b720b-b782-41c4-b60d-ef0af4b62932-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.742466] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "213b720b-b782-41c4-b60d-ef0af4b62932-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.745982] env[68217]: INFO nova.compute.manager [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Terminating instance [ 933.836436] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961569, 'name': CreateVM_Task, 'duration_secs': 0.410945} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.836604] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 933.837297] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.837464] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 933.837764] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 933.838024] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4221fc1b-ef29-4df5-a4de-84d619a2cd0f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.842807] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 933.842807] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52cc1680-c46b-bc00-82aa-f2d0d605b543" [ 933.842807] env[68217]: _type = "Task" [ 933.842807] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.854438] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52cc1680-c46b-bc00-82aa-f2d0d605b543, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.940962] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0737bef3-b1c0-4023-a7d9-a1ee7531982d tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.016s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.942200] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 16.254s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.942368] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 933.942520] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68217) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 933.942927] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.629s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 933.943219] env[68217]: DEBUG nova.objects.instance [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lazy-loading 'resources' on Instance uuid ca9ef7ff-b942-4363-a4f8-9163791ec162 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 933.945142] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-784fb23b-0941-4266-85ad-8ce63e4070e2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.954251] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00f80b65-a8c3-4a2a-acac-b58a6f5cd486 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.975439] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1601fec1-62b8-458b-8023-5efdbbc400b5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.987800] env[68217]: INFO nova.compute.manager [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Took 26.22 seconds to build instance. 
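The entries above trace oslo.vmware's task handling: a vSphere task such as CreateVM_Task or ReconfigVM_Task is started via the session, then wait_for_task polls it (the repeated "_poll_task ... progress is N%" lines) until it reports completion. A minimal sketch of that pattern, assuming an already-established oslo_vmware.api.VMwareAPISession; the function name and the folder/config/pool placeholders are illustrative, not taken from this log:

    # Sketch of the oslo.vmware task-wait pattern visible in the log.
    # `session` is assumed to be an oslo_vmware.api.VMwareAPISession;
    # `folder_ref`, `config_spec` and `pool_ref` are hypothetical placeholders.
    from oslo_vmware import exceptions as vexc

    def create_vm(session, folder_ref, config_spec, pool_ref):
        # Start the vSphere task (log: "Invoking Folder.CreateVM_Task ...").
        task_ref = session.invoke_api(session.vim, 'CreateVM_Task',
                                      folder_ref, config=config_spec,
                                      pool=pool_ref)
        try:
            # Blocks while oslo.vmware polls the task (log: "Waiting for the
            # task ... progress is N%") and returns the task info once the
            # task completes successfully.
            task_info = session.wait_for_task(task_ref)
        except vexc.VimFaultException:
            # The task failed on the vCenter side; let the caller handle it.
            raise
        # On success, task_info.result holds the new VM's managed object ref.
        return task_info.result

The "Task: {'id': task-2961569, 'name': CreateVM_Task, 'duration_secs': ...} completed successfully" lines that follow are the log form of wait_for_task returning.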
[ 933.997824] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8461de-469e-48d7-b0ed-3c8115e24476 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.003336] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Releasing lock "refresh_cache-fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.003751] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 934.038875] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d98dde8b-1f37-4ae1-b89a-a6e890dd51fa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.040865] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=177851MB free_disk=153GB free_vcpus=48 pci_devices=None {{(pid=68217) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 934.040865] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.041311] env[68217]: DEBUG oslo_vmware.api [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Task: {'id': task-2961570, 'name': ReconfigVM_Task, 'duration_secs': 0.191826} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.044221] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Reconfigured VM instance instance-0000002c to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 934.054880] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f15e0da0-e97d-48a1-a255-cab0712b7e76 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.066453] env[68217]: DEBUG oslo_vmware.api [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 934.066453] env[68217]: value = "task-2961571" [ 934.066453] env[68217]: _type = "Task" [ 934.066453] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.072386] env[68217]: DEBUG oslo_vmware.api [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Waiting for the task: (returnval){ [ 934.072386] env[68217]: value = "task-2961572" [ 934.072386] env[68217]: _type = "Task" [ 934.072386] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.075745] env[68217]: DEBUG oslo_vmware.api [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961571, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.086402] env[68217]: DEBUG oslo_vmware.api [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Task: {'id': task-2961572, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.154999] env[68217]: DEBUG oslo_vmware.api [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961553, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.204475] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Acquiring lock "4366c94c-164d-4cb9-8f04-7f26db4c0d3c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.204637] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Lock "4366c94c-164d-4cb9-8f04-7f26db4c0d3c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.204863] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Acquiring lock "4366c94c-164d-4cb9-8f04-7f26db4c0d3c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 934.205207] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Lock "4366c94c-164d-4cb9-8f04-7f26db4c0d3c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 934.205207] env[68217]: DEBUG oslo_concurrency.lockutils [None 
req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Lock "4366c94c-164d-4cb9-8f04-7f26db4c0d3c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.208542] env[68217]: INFO nova.compute.manager [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Terminating instance [ 934.244843] env[68217]: DEBUG nova.network.neutron [req-0c4e241f-e450-44c4-822c-36582d52eb4c req-a0334439-0c4d-4c7d-b584-6b82c8bc0472 service nova] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updated VIF entry in instance network info cache for port 772726db-4382-4051-9a7d-abfc670d5c9b. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 934.245017] env[68217]: DEBUG nova.network.neutron [req-0c4e241f-e450-44c4-822c-36582d52eb4c req-a0334439-0c4d-4c7d-b584-6b82c8bc0472 service nova] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updating instance_info_cache with network_info: [{"id": "772726db-4382-4051-9a7d-abfc670d5c9b", "address": "fa:16:3e:49:ce:f8", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap772726db-43", "ovs_interfaceid": "772726db-4382-4051-9a7d-abfc670d5c9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 934.253404] env[68217]: DEBUG nova.compute.manager [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 934.253666] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 934.255464] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151e5088-8541-42bf-be0a-6bd8e1c19cdf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.267661] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 934.267891] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-16e8a7d7-fe91-43b9-9c44-c396b5ea5372 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.333934] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 934.334217] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 934.334414] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleting the datastore file [datastore1] 213b720b-b782-41c4-b60d-ef0af4b62932 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 934.334797] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a610f648-0bed-4530-b86d-6318cc04fc31 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.341893] env[68217]: DEBUG oslo_vmware.api [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 934.341893] env[68217]: value = "task-2961574" [ 934.341893] env[68217]: _type = "Task" [ 934.341893] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.356293] env[68217]: DEBUG oslo_vmware.api [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961574, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.356560] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52cc1680-c46b-bc00-82aa-f2d0d605b543, 'name': SearchDatastore_Task, 'duration_secs': 0.037921} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.356848] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.357094] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 934.357327] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.357462] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 934.357644] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 934.357901] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c2706e1-bdcd-46a4-926b-d13a32e1bccb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.370048] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 934.370048] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 934.370602] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-babf06c1-7469-4a38-85cb-9381c14e8112 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.376860] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 934.376860] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5276d4e9-fa7e-4ddc-2090-c9bd91c3601b" [ 934.376860] env[68217]: _type = "Task" [ 934.376860] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.384724] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5276d4e9-fa7e-4ddc-2090-c9bd91c3601b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.490297] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3cd7aba5-2777-4892-9bb5-0790f4940b09 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "fab7d1eb-ef05-4498-aa6d-a524c3bb59c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 27.735s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 934.577713] env[68217]: DEBUG oslo_vmware.api [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961571, 'name': PowerOffVM_Task, 'duration_secs': 0.208053} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.580619] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 934.581287] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:19:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='b6918665-ab7d-45a4-86f9-01de99934033',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-847543468',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 934.581500] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 934.581655] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 934.581833] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 934.581986] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 934.582127] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 934.582406] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 934.582585] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 
tempest-MigrationsAdminTest-1132531208-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 934.582753] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 934.582921] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 934.583131] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 934.590061] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73048336-70b8-4cef-8990-be38ec482a9a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.605415] env[68217]: DEBUG oslo_vmware.api [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Task: {'id': task-2961572, 'name': ReconfigVM_Task, 'duration_secs': 0.173093} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.606656] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594156', 'volume_id': '03332631-865e-4746-a213-a86fd1f1f4ef', 'name': 'volume-03332631-865e-4746-a213-a86fd1f1f4ef', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '0552d616-a406-4dfa-8a70-82f39fb98bbc', 'attached_at': '', 'detached_at': '', 'volume_id': '03332631-865e-4746-a213-a86fd1f1f4ef', 'serial': '03332631-865e-4746-a213-a86fd1f1f4ef'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 934.606920] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 934.607271] env[68217]: DEBUG oslo_vmware.api [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 934.607271] env[68217]: value = "task-2961575" [ 934.607271] env[68217]: _type = "Task" [ 934.607271] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.607946] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cb03e76-89c6-49c4-9972-832e988176a6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.620959] env[68217]: DEBUG oslo_vmware.api [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961575, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.622771] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 934.623127] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2a59739f-0e0d-4d65-b218-753ef1f6d3e8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.659928] env[68217]: DEBUG oslo_vmware.api [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961553, 'name': ReconfigVM_Task, 'duration_secs': 5.80048} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.660290] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.660544] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Reconfigured VM to detach interface {{(pid=68217) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 934.692728] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 934.693105] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 934.693379] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Deleting the datastore file [datastore1] 0552d616-a406-4dfa-8a70-82f39fb98bbc 
{{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 934.696671] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7773b42e-5047-4758-8d4d-f6091b731ec0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.702803] env[68217]: DEBUG oslo_vmware.api [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Waiting for the task: (returnval){ [ 934.702803] env[68217]: value = "task-2961577" [ 934.702803] env[68217]: _type = "Task" [ 934.702803] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.712536] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Acquiring lock "refresh_cache-4366c94c-164d-4cb9-8f04-7f26db4c0d3c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.712745] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Acquired lock "refresh_cache-4366c94c-164d-4cb9-8f04-7f26db4c0d3c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 934.712935] env[68217]: DEBUG nova.network.neutron [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 934.714081] env[68217]: DEBUG oslo_vmware.api [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Task: {'id': task-2961577, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.748201] env[68217]: DEBUG oslo_concurrency.lockutils [req-0c4e241f-e450-44c4-822c-36582d52eb4c req-a0334439-0c4d-4c7d-b584-6b82c8bc0472 service nova] Releasing lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.847884] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-146cc529-1c5e-41ee-ab2f-a7866eb7ed4f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.855057] env[68217]: DEBUG oslo_vmware.api [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961574, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.34267} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.856953] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 934.857179] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 934.857369] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 934.857547] env[68217]: INFO nova.compute.manager [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Took 0.60 seconds to destroy the instance on the hypervisor. [ 934.857780] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 934.858367] env[68217]: DEBUG nova.compute.manager [-] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 934.858470] env[68217]: DEBUG nova.network.neutron [-] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 934.860875] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14dd19fa-5ddd-4f92-8461-507f6f464cb8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.894955] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6fcae1c-9c14-4983-8819-c31f35147e54 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.905241] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5276d4e9-fa7e-4ddc-2090-c9bd91c3601b, 'name': SearchDatastore_Task, 'duration_secs': 0.018739} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.907061] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55776d6d-ff8b-4354-a365-096a26c4e1d8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.910854] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6e01fd7-c94e-45e4-a16e-df89fdc9a761 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.917551] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 934.917551] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5270d558-c9d8-f135-709d-ebc781195b1c" [ 934.917551] env[68217]: _type = "Task" [ 934.917551] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.925387] env[68217]: DEBUG nova.compute.provider_tree [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 934.944393] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5270d558-c9d8-f135-709d-ebc781195b1c, 'name': SearchDatastore_Task, 'duration_secs': 0.010728} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.944649] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 934.944948] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 04149a5c-d1b5-4d71-a1ca-44696506a40d/04149a5c-d1b5-4d71-a1ca-44696506a40d.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 934.945228] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e29a647f-59f2-4945-8770-6b433068a234 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.951939] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 934.951939] env[68217]: value = "task-2961578" [ 934.951939] env[68217]: _type = "Task" [ 934.951939] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.959976] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961578, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.123880] env[68217]: DEBUG oslo_vmware.api [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961575, 'name': ReconfigVM_Task, 'duration_secs': 0.236175} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.125194] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ab571c-d21e-4aa5-948f-9a981ee055f1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.145681] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:19:58Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='b6918665-ab7d-45a4-86f9-01de99934033',id=40,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-847543468',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 935.145931] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 935.146120] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 935.146276] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 935.146442] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 935.146558] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 935.146759] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 935.146912] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 935.147092] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 935.147282] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 935.147451] env[68217]: DEBUG nova.virt.hardware [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 935.148287] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1dc88d8-10e5-4d20-8b47-9c444f97816b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.153828] env[68217]: DEBUG oslo_vmware.api [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 935.153828] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]523d714f-cd7f-d2eb-3e69-a690f9cd99bb" [ 935.153828] env[68217]: _type = "Task" [ 935.153828] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.161841] env[68217]: DEBUG oslo_vmware.api [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523d714f-cd7f-d2eb-3e69-a690f9cd99bb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.212329] env[68217]: DEBUG oslo_vmware.api [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Task: {'id': task-2961577, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090841} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.215788] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 935.215788] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 935.215788] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 935.215788] env[68217]: INFO nova.compute.manager [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Took 2.37 seconds to destroy the instance on the hypervisor. [ 935.215788] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 935.215788] env[68217]: DEBUG nova.compute.manager [-] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 935.215788] env[68217]: DEBUG nova.network.neutron [-] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 935.266636] env[68217]: DEBUG nova.network.neutron [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 935.431075] env[68217]: DEBUG nova.scheduler.client.report [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 935.460545] env[68217]: DEBUG nova.network.neutron [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.469456] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961578, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.678437] env[68217]: DEBUG oslo_vmware.api [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523d714f-cd7f-d2eb-3e69-a690f9cd99bb, 'name': SearchDatastore_Task, 'duration_secs': 0.007374} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.686121] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Reconfiguring VM instance instance-0000004a to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 935.686121] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-491b2652-876e-4eac-bd37-7e5d894a3963 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.707346] env[68217]: DEBUG oslo_vmware.api [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 935.707346] env[68217]: value = "task-2961579" [ 935.707346] env[68217]: _type = "Task" [ 935.707346] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.720469] env[68217]: DEBUG oslo_vmware.api [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961579, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.937142] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.994s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 935.943604] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 12.276s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 935.963554] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Releasing lock "refresh_cache-4366c94c-164d-4cb9-8f04-7f26db4c0d3c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 935.964025] env[68217]: DEBUG nova.compute.manager [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 935.964235] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 935.969139] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca8939ed-34f3-40ae-b0cb-8cb497bb05f9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.977256] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961578, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.614863} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.978288] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 04149a5c-d1b5-4d71-a1ca-44696506a40d/04149a5c-d1b5-4d71-a1ca-44696506a40d.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 935.978288] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 935.978536] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d23aae03-b195-4fe3-80ff-5b75a174aa3a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.988020] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 935.989339] env[68217]: DEBUG nova.compute.manager [req-dbc4b72d-913a-45e5-819b-dd7e8013dca0 req-185baccc-2bce-49e8-9c95-d1074f02de5c service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Received event network-vif-deleted-efb63bec-2d1f-41ad-b7bc-f9dc46cdd111 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 935.989515] env[68217]: INFO nova.compute.manager [req-dbc4b72d-913a-45e5-819b-dd7e8013dca0 req-185baccc-2bce-49e8-9c95-d1074f02de5c service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Neutron deleted interface efb63bec-2d1f-41ad-b7bc-f9dc46cdd111; detaching it from the instance and deleting it from the info cache [ 935.989781] env[68217]: DEBUG nova.network.neutron [req-dbc4b72d-913a-45e5-819b-dd7e8013dca0 req-185baccc-2bce-49e8-9c95-d1074f02de5c service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Updating instance_info_cache with network_info: [{"id": "9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2", "address": "fa:16:3e:cd:e0:cd", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e834f9f-3d", "ovs_interfaceid": "9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 935.991292] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-292093e0-83cb-4672-b4b0-fe0d990eb35f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.000184] env[68217]: INFO nova.scheduler.client.report [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Deleted allocations for instance ca9ef7ff-b942-4363-a4f8-9163791ec162 [ 936.004364] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 936.004364] env[68217]: value = "task-2961580" [ 936.004364] env[68217]: _type = "Task" [ 936.004364] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.016929] env[68217]: DEBUG oslo_vmware.api [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 936.016929] env[68217]: value = "task-2961581" [ 936.016929] env[68217]: _type = "Task" [ 936.016929] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.025098] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961580, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.030403] env[68217]: DEBUG oslo_vmware.api [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961581, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.080690] env[68217]: DEBUG nova.compute.manager [req-8a2e7384-6c9f-472b-86ca-9b2c51786b50 req-30ce4003-b757-493b-9ad7-8371b1a6369c service nova] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Received event network-vif-deleted-dd5b95b3-32c1-4279-b996-ecf817d6418d {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 936.080911] env[68217]: INFO nova.compute.manager [req-8a2e7384-6c9f-472b-86ca-9b2c51786b50 req-30ce4003-b757-493b-9ad7-8371b1a6369c service nova] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Neutron deleted interface dd5b95b3-32c1-4279-b996-ecf817d6418d; detaching it from the instance and deleting it from the info cache [ 936.081134] env[68217]: DEBUG nova.network.neutron [req-8a2e7384-6c9f-472b-86ca-9b2c51786b50 req-30ce4003-b757-493b-9ad7-8371b1a6369c service nova] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.169526] env[68217]: DEBUG nova.network.neutron [-] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.215807] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "fab7d1eb-ef05-4498-aa6d-a524c3bb59c8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.216125] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "fab7d1eb-ef05-4498-aa6d-a524c3bb59c8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.216338] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "fab7d1eb-ef05-4498-aa6d-a524c3bb59c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.216686] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "fab7d1eb-ef05-4498-aa6d-a524c3bb59c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.216686] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "fab7d1eb-ef05-4498-aa6d-a524c3bb59c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.222019] env[68217]: DEBUG oslo_vmware.api [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961579, 'name': ReconfigVM_Task, 'duration_secs': 0.203465} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.222535] env[68217]: INFO nova.compute.manager [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Terminating instance [ 936.224399] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Reconfigured VM instance instance-0000004a to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 936.226769] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-630f0b90-b6bd-49a7-968e-17278a7fcf38 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.249438] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Reconfiguring VM instance instance-0000004a to attach disk [datastore2] fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb/fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 936.250412] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-791e2d0c-1f28-4b4e-956e-b176a549b384 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.270038] env[68217]: DEBUG oslo_vmware.api [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 936.270038] env[68217]: value = "task-2961582" [ 936.270038] env[68217]: _type = "Task" [ 936.270038] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.278621] env[68217]: DEBUG oslo_vmware.api [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961582, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.447797] env[68217]: INFO nova.compute.claims [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 936.497972] env[68217]: DEBUG oslo_concurrency.lockutils [req-dbc4b72d-913a-45e5-819b-dd7e8013dca0 req-185baccc-2bce-49e8-9c95-d1074f02de5c service nova] Acquiring lock "580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.498181] env[68217]: DEBUG oslo_concurrency.lockutils [req-dbc4b72d-913a-45e5-819b-dd7e8013dca0 req-185baccc-2bce-49e8-9c95-d1074f02de5c service nova] Acquired lock "580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.499099] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63f850f-f79c-43df-b281-ee4b737b8cbd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.518190] env[68217]: DEBUG oslo_concurrency.lockutils [req-dbc4b72d-913a-45e5-819b-dd7e8013dca0 req-185baccc-2bce-49e8-9c95-d1074f02de5c service nova] Releasing lock "580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 936.518446] env[68217]: WARNING nova.compute.manager [req-dbc4b72d-913a-45e5-819b-dd7e8013dca0 req-185baccc-2bce-49e8-9c95-d1074f02de5c service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Detach interface failed, port_id=efb63bec-2d1f-41ad-b7bc-f9dc46cdd111, reason: No device with interface-id efb63bec-2d1f-41ad-b7bc-f9dc46cdd111 exists on VM: nova.exception.NotFound: No device with interface-id efb63bec-2d1f-41ad-b7bc-f9dc46cdd111 exists on VM [ 936.519060] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c0afa2ca-6d43-4b78-980e-8fa3d61093cd tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "ca9ef7ff-b942-4363-a4f8-9163791ec162" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.823s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.530765] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961580, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.083451} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.534013] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 936.534337] env[68217]: DEBUG oslo_vmware.api [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961581, 'name': PowerOffVM_Task, 'duration_secs': 0.168473} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.535607] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55f60605-3e3f-49fd-902a-33a9d4255b4f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.538221] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 936.538418] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 936.538888] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7d504b66-eb1c-4523-b984-cdf4fcc331cb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.560952] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 04149a5c-d1b5-4d71-a1ca-44696506a40d/04149a5c-d1b5-4d71-a1ca-44696506a40d.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 936.561342] env[68217]: DEBUG nova.network.neutron [-] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.562785] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c0a9c02f-50f4-42cc-a5d2-7a15dacea614 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.581208] env[68217]: INFO nova.compute.manager [-] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Took 1.37 seconds to deallocate network for instance. 
[ 936.581208] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 936.581208] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 936.581436] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Deleting the datastore file [datastore2] 4366c94c-164d-4cb9-8f04-7f26db4c0d3c {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 936.585266] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ab40dfd8-a417-4227-b246-289c84846fd5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.590061] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f2fd1fbe-f55f-4464-8c10-8fcfc8788b46 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.592463] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 936.592463] env[68217]: value = "task-2961584" [ 936.592463] env[68217]: _type = "Task" [ 936.592463] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.598292] env[68217]: DEBUG oslo_vmware.api [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for the task: (returnval){ [ 936.598292] env[68217]: value = "task-2961585" [ 936.598292] env[68217]: _type = "Task" [ 936.598292] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.602252] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d25a91a1-99ed-45ea-9b68-dd92660f6caa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.618603] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961584, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.625311] env[68217]: DEBUG oslo_vmware.api [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961585, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.646626] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "refresh_cache-580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.646821] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "refresh_cache-580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 936.647039] env[68217]: DEBUG nova.network.neutron [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 936.649048] env[68217]: DEBUG nova.compute.manager [req-8a2e7384-6c9f-472b-86ca-9b2c51786b50 req-30ce4003-b757-493b-9ad7-8371b1a6369c service nova] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Detach interface failed, port_id=dd5b95b3-32c1-4279-b996-ecf817d6418d, reason: Instance 0552d616-a406-4dfa-8a70-82f39fb98bbc could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 936.672697] env[68217]: INFO nova.compute.manager [-] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Took 1.81 seconds to deallocate network for instance. [ 936.733521] env[68217]: DEBUG nova.compute.manager [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 936.733521] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 936.733521] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7503f4f0-6437-4cdb-b570-d8871e47bb20 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.742739] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 936.744552] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7a5a5461-5c91-4ae2-b18e-333da5126126 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.751047] env[68217]: DEBUG oslo_vmware.api [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 936.751047] env[68217]: value = "task-2961586" [ 936.751047] env[68217]: _type = "Task" [ 936.751047] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.760189] env[68217]: DEBUG oslo_vmware.api [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961586, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.783256] env[68217]: DEBUG oslo_vmware.api [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961582, 'name': ReconfigVM_Task, 'duration_secs': 0.376959} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.784796] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Reconfigured VM instance instance-0000004a to attach disk [datastore2] fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb/fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 936.785050] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431dfd7c-19ed-4fd1-bd4e-3ba41a08b53c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.805313] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a668124-69f2-4ab3-85dc-f6c7e08b4157 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.826244] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cb7187a-13dc-43fd-8986-bd48f7c179e7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.844934] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed6decee-c56a-4170-aed0-bf3a646d78a4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.852152] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 936.854127] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-57475db2-5ca5-4692-9eaa-51aa2fd1aac5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.859507] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "580e6909-7d05-447a-a378-f0b8b71f059a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.860134] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "580e6909-7d05-447a-a378-f0b8b71f059a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.860134] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "580e6909-7d05-447a-a378-f0b8b71f059a-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 936.860134] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "580e6909-7d05-447a-a378-f0b8b71f059a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 936.860463] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "580e6909-7d05-447a-a378-f0b8b71f059a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 936.862072] env[68217]: DEBUG oslo_vmware.api [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 936.862072] env[68217]: value = "task-2961587" [ 936.862072] env[68217]: _type = "Task" [ 936.862072] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.862358] env[68217]: INFO nova.compute.manager [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Terminating instance [ 936.872385] env[68217]: DEBUG oslo_vmware.api [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961587, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.954453] env[68217]: INFO nova.compute.resource_tracker [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Updating resource usage from migration 491f93e8-1987-44d6-adf9-567206333bb4 [ 937.102311] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961584, 'name': ReconfigVM_Task, 'duration_secs': 0.422457} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.107999] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 04149a5c-d1b5-4d71-a1ca-44696506a40d/04149a5c-d1b5-4d71-a1ca-44696506a40d.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 937.109745] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6aaff7d7-faad-4e22-97de-15694e3bef95 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.117159] env[68217]: DEBUG oslo_vmware.api [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Task: {'id': task-2961585, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.172943} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.118397] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 937.118584] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 937.118765] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 937.118937] env[68217]: INFO nova.compute.manager [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Took 1.15 seconds to destroy the instance on the hypervisor. [ 937.119191] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 937.119445] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 937.119445] env[68217]: value = "task-2961588" [ 937.119445] env[68217]: _type = "Task" [ 937.119445] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.119677] env[68217]: DEBUG nova.compute.manager [-] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 937.119759] env[68217]: DEBUG nova.network.neutron [-] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 937.134105] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961588, 'name': Rename_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.143759] env[68217]: DEBUG nova.network.neutron [-] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 937.145618] env[68217]: INFO nova.compute.manager [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Took 0.56 seconds to detach 1 volumes for instance. [ 937.148019] env[68217]: DEBUG nova.compute.manager [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Deleting volume: 03332631-865e-4746-a213-a86fd1f1f4ef {{(pid=68217) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 937.182633] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.264035] env[68217]: DEBUG oslo_vmware.api [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961586, 'name': PowerOffVM_Task, 'duration_secs': 0.212532} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.264418] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 937.264485] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 937.268499] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ee2ade2-06a7-4bbb-8890-61434b86a536 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.338324] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 937.341894] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 937.341894] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleting the datastore file [datastore2] fab7d1eb-ef05-4498-aa6d-a524c3bb59c8 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 937.341894] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c3c68ec-041b-4d52-a67a-7243246f7c61 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.345828] env[68217]: DEBUG oslo_vmware.api [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 937.345828] env[68217]: value = "task-2961591" [ 937.345828] env[68217]: _type = "Task" [ 937.345828] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.355892] env[68217]: DEBUG oslo_vmware.api [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961591, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.370676] env[68217]: DEBUG nova.compute.manager [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 937.370908] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 937.375779] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94724767-f2a5-4dcb-9f81-104412493001 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.380019] env[68217]: DEBUG oslo_vmware.api [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961587, 'name': PowerOnVM_Task, 'duration_secs': 0.398603} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.380019] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 937.386405] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 937.388904] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-df4fd284-f71a-4d78-845f-8c57a2e837af {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.395498] env[68217]: DEBUG oslo_vmware.api [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 937.395498] env[68217]: value = "task-2961592" [ 937.395498] env[68217]: _type = "Task" [ 937.395498] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.403832] env[68217]: DEBUG oslo_vmware.api [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961592, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.451233] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3918b905-d661-4e06-a1cc-0266fdaf1ec5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.460927] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db1db331-3e48-4a29-bede-06e77240bf5b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.494594] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5ac1633-0757-49b1-abe3-f9fe9da863e7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.503323] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c04f986-d78c-433e-a3d1-badda7502617 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.518133] env[68217]: DEBUG nova.compute.provider_tree [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 937.520429] env[68217]: DEBUG nova.network.neutron [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Updating instance_info_cache with network_info: [{"id": "9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2", "address": "fa:16:3e:cd:e0:cd", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.208", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e834f9f-3d", "ovs_interfaceid": "9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.631487] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961588, 'name': Rename_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.642088] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "09a8469d-567c-4247-96eb-edf0f4040f65" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.642352] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "09a8469d-567c-4247-96eb-edf0f4040f65" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.642555] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "09a8469d-567c-4247-96eb-edf0f4040f65-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.642734] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "09a8469d-567c-4247-96eb-edf0f4040f65-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 937.642903] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "09a8469d-567c-4247-96eb-edf0f4040f65-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 937.645812] env[68217]: INFO nova.compute.manager [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Terminating instance [ 937.647827] env[68217]: DEBUG nova.network.neutron [-] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 937.702716] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 937.856212] env[68217]: DEBUG oslo_vmware.api [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961591, 'name': DeleteDatastoreFile_Task, 
'duration_secs': 0.163246} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.856561] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 937.857656] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 937.857656] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 937.857656] env[68217]: INFO nova.compute.manager [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Took 1.13 seconds to destroy the instance on the hypervisor. [ 937.857656] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 937.857656] env[68217]: DEBUG nova.compute.manager [-] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 937.857656] env[68217]: DEBUG nova.network.neutron [-] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 937.905661] env[68217]: DEBUG oslo_vmware.api [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961592, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.022817] env[68217]: DEBUG nova.scheduler.client.report [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 938.027129] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "refresh_cache-580e6909-7d05-447a-a378-f0b8b71f059a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 938.045730] env[68217]: DEBUG nova.compute.manager [req-bba303e6-98e8-484e-8681-9cad83046e5f req-a832c93a-bdcc-4fd4-92b3-286925d40324 service nova] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Received event network-vif-deleted-41cb41cd-7c04-4409-948d-b45a5441a4f4 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 938.134269] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961588, 'name': Rename_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.152186] env[68217]: DEBUG nova.compute.manager [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 938.152186] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 938.152186] env[68217]: INFO nova.compute.manager [-] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Took 1.03 seconds to deallocate network for instance. 
[ 938.152691] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc4ccd8-95bb-456d-bfd6-a2e808ead417 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.164163] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 938.164408] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3cd3bc3b-4c32-497c-9b39-0017b7fa4a7f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.170355] env[68217]: DEBUG oslo_vmware.api [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 938.170355] env[68217]: value = "task-2961593" [ 938.170355] env[68217]: _type = "Task" [ 938.170355] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.181985] env[68217]: DEBUG oslo_vmware.api [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961593, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.299463] env[68217]: DEBUG nova.compute.manager [req-f09d169e-a1ac-4e70-a9a9-0c184cfffc5d req-ce9d652f-fcc1-41fb-b182-2def35e31935 service nova] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Received event network-vif-deleted-f195768f-440b-4ba7-b21f-04f548d7bc10 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 938.299656] env[68217]: INFO nova.compute.manager [req-f09d169e-a1ac-4e70-a9a9-0c184cfffc5d req-ce9d652f-fcc1-41fb-b182-2def35e31935 service nova] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Neutron deleted interface f195768f-440b-4ba7-b21f-04f548d7bc10; detaching it from the instance and deleting it from the info cache [ 938.299819] env[68217]: DEBUG nova.network.neutron [req-f09d169e-a1ac-4e70-a9a9-0c184cfffc5d req-ce9d652f-fcc1-41fb-b182-2def35e31935 service nova] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.309529] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "d14026b1-84dd-430e-be94-94dcb1f47473" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.309780] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "d14026b1-84dd-430e-be94-94dcb1f47473" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s 
{{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.310186] env[68217]: INFO nova.compute.manager [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Shelving [ 938.391450] env[68217]: INFO nova.compute.manager [None req-e0c90a6f-ad30-494a-87f4-06e114f90f4b tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Updating instance to original state: 'active' [ 938.406832] env[68217]: DEBUG oslo_vmware.api [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961592, 'name': PowerOffVM_Task, 'duration_secs': 0.756505} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.407310] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 938.407652] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 938.408024] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1b755772-18c4-4628-abe5-a2715503b145 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.474416] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 938.474707] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 938.475088] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Deleting the datastore file [datastore2] 580e6909-7d05-447a-a378-f0b8b71f059a {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 938.475805] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a1946d90-85f4-4d6c-9502-5e503021d3a3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.483217] env[68217]: DEBUG oslo_vmware.api [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce 
tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 938.483217] env[68217]: value = "task-2961595" [ 938.483217] env[68217]: _type = "Task" [ 938.483217] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.492634] env[68217]: DEBUG oslo_vmware.api [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961595, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.530859] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.591s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.531075] env[68217]: INFO nova.compute.manager [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Migrating [ 938.539365] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c9b999f6-28f7-4a54-9099-adfe680a8cd9 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-580e6909-7d05-447a-a378-f0b8b71f059a-efb63bec-2d1f-41ad-b7bc-f9dc46cdd111" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.516s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 938.547715] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.933s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 938.548153] env[68217]: DEBUG nova.objects.instance [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lazy-loading 'resources' on Instance uuid 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 938.639127] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961588, 'name': Rename_Task, 'duration_secs': 1.02067} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.640185] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 938.640495] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-12f5f30c-a02e-4197-a6e7-234bd710c7f5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.649323] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 938.649323] env[68217]: value = "task-2961596" [ 938.649323] env[68217]: _type = "Task" [ 938.649323] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.659443] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961596, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.668065] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 938.681743] env[68217]: DEBUG oslo_vmware.api [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961593, 'name': PowerOffVM_Task, 'duration_secs': 0.402315} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.682238] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 938.682508] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 938.682867] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-265e175b-2691-4398-9b46-63d6fa2341ec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.745809] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 938.746041] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 938.746257] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Deleting the datastore file [datastore1] 09a8469d-567c-4247-96eb-edf0f4040f65 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 938.746518] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-104803ef-892d-4eab-9f33-f6e71cbaf0ac {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.753480] env[68217]: DEBUG oslo_vmware.api [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 938.753480] env[68217]: value = "task-2961598" [ 938.753480] env[68217]: _type = "Task" [ 938.753480] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.763208] env[68217]: DEBUG oslo_vmware.api [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961598, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.782761] env[68217]: DEBUG nova.network.neutron [-] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 938.803508] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8fcdd617-399d-494f-bc29-1c9db706f9e0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.813146] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9907e1d2-9a2d-41b5-8512-60daa6f8fb70 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.626682] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "refresh_cache-35817c87-0c55-49bd-917a-59bd39de663c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.627103] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "refresh_cache-35817c87-0c55-49bd-917a-59bd39de663c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 939.627103] env[68217]: DEBUG nova.network.neutron [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 939.631486] env[68217]: INFO nova.compute.manager [-] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Took 1.77 seconds to deallocate network for instance. [ 939.631953] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 939.632319] env[68217]: DEBUG nova.compute.manager [req-f09d169e-a1ac-4e70-a9a9-0c184cfffc5d req-ce9d652f-fcc1-41fb-b182-2def35e31935 service nova] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Detach interface failed, port_id=f195768f-440b-4ba7-b21f-04f548d7bc10, reason: Instance fab7d1eb-ef05-4498-aa6d-a524c3bb59c8 could not be found. 
{{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 939.650022] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bec126b9-5e43-456b-bca8-38cff18929a1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.668819] env[68217]: DEBUG oslo_vmware.api [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961598, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183107} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.681924] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 939.682192] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 939.682395] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 939.682625] env[68217]: INFO nova.compute.manager [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Took 1.53 seconds to destroy the instance on the hypervisor. [ 939.682941] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 939.683223] env[68217]: DEBUG oslo_vmware.api [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961595, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16457} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.684418] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961596, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.685121] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 939.685121] env[68217]: value = "task-2961599" [ 939.685121] env[68217]: _type = "Task" [ 939.685121] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 939.688310] env[68217]: DEBUG nova.compute.manager [-] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 939.688402] env[68217]: DEBUG nova.network.neutron [-] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 939.690757] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 939.691026] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 939.691291] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 939.691580] env[68217]: INFO nova.compute.manager [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Took 2.32 seconds to destroy the instance on the hypervisor. [ 939.691859] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 939.693322] env[68217]: DEBUG nova.compute.manager [-] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 939.693620] env[68217]: DEBUG nova.network.neutron [-] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 939.705765] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961599, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.099677] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d9dbcf-7a4a-4471-a903-f93f93dda65a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.107210] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19c575b4-23d8-4cb6-b59f-0a11116ed213 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.144098] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b32dc22-3239-4f97-a52a-7988be67d227 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.155810] env[68217]: DEBUG oslo_vmware.api [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961596, 'name': PowerOnVM_Task, 'duration_secs': 1.178835} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.158129] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 940.158371] env[68217]: INFO nova.compute.manager [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Took 10.23 seconds to spawn the instance on the hypervisor. 
[ 940.158784] env[68217]: DEBUG nova.compute.manager [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 940.159664] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60fb2070-5557-4501-8472-f814c1cd0e42 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.164459] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aeaf12e6-517c-4b44-b9b8-abceb056dd78 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.168624] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.168949] env[68217]: DEBUG oslo_concurrency.lockutils [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.169229] env[68217]: DEBUG oslo_concurrency.lockutils [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.169444] env[68217]: DEBUG oslo_concurrency.lockutils [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 940.169678] env[68217]: DEBUG oslo_concurrency.lockutils [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 940.169803] env[68217]: DEBUG oslo_concurrency.lockutils [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 940.178160] env[68217]: INFO nova.compute.manager [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Terminating instance [ 940.188464] env[68217]: DEBUG nova.compute.provider_tree [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 940.206096] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961599, 'name': PowerOffVM_Task, 'duration_secs': 0.22864} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 940.206910] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 940.207919] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-819afe78-4c8d-40b1-b618-2d1ffb52cd61 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.227381] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2aacec8-9356-4f0f-9645-b66b2efca5b0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.436631] env[68217]: DEBUG nova.compute.manager [req-3ef4040e-7d3a-4d91-ac3c-86fe3d77e9f5 req-a62e4f0d-3bae-4c60-bc1f-3eea4f1e2c56 service nova] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Received event network-vif-deleted-ed3866d2-77b3-4f38-903b-76d8b6349fc8 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 940.436896] env[68217]: INFO nova.compute.manager [req-3ef4040e-7d3a-4d91-ac3c-86fe3d77e9f5 req-a62e4f0d-3bae-4c60-bc1f-3eea4f1e2c56 service nova] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Neutron deleted interface ed3866d2-77b3-4f38-903b-76d8b6349fc8; detaching it from the instance and deleting it from the info cache [ 940.437501] env[68217]: DEBUG nova.network.neutron [req-3ef4040e-7d3a-4d91-ac3c-86fe3d77e9f5 req-a62e4f0d-3bae-4c60-bc1f-3eea4f1e2c56 service nova] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.636648] env[68217]: DEBUG nova.network.neutron [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Updating instance_info_cache with network_info: [{"id": "63b92bd9-8f95-481e-9ef4-468ea20dade1", "address": "fa:16:3e:e8:e5:1d", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": 
"tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b92bd9-8f", "ovs_interfaceid": "63b92bd9-8f95-481e-9ef4-468ea20dade1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.648212] env[68217]: DEBUG nova.network.neutron [-] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.701704] env[68217]: DEBUG nova.scheduler.client.report [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 940.712117] env[68217]: DEBUG nova.compute.manager [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 940.712117] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 940.713958] env[68217]: INFO nova.compute.manager [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Took 30.11 seconds to build instance. 
[ 940.717390] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02ea48fa-c290-4888-8607-18bddf6a3680 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.731077] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 940.731077] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e4380ad5-a5a8-4a58-b871-ee0975f25755 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.740217] env[68217]: DEBUG oslo_vmware.api [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 940.740217] env[68217]: value = "task-2961600" [ 940.740217] env[68217]: _type = "Task" [ 940.740217] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.742468] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 940.746817] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0b02873b-e189-45f1-acfe-311a567b8087 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.754173] env[68217]: DEBUG oslo_vmware.api [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961600, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.755735] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 940.755735] env[68217]: value = "task-2961601" [ 940.755735] env[68217]: _type = "Task" [ 940.755735] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 940.766781] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961601, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.785239] env[68217]: DEBUG nova.compute.manager [req-e9345b64-40ae-4099-9894-c476a6b5e00f req-323ae8b4-a907-4e9f-97bc-26e85c1deca4 service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Received event network-vif-deleted-9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 940.786292] env[68217]: INFO nova.compute.manager [req-e9345b64-40ae-4099-9894-c476a6b5e00f req-323ae8b4-a907-4e9f-97bc-26e85c1deca4 service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Neutron deleted interface 9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2; detaching it from the instance and deleting it from the info cache [ 940.786292] env[68217]: DEBUG nova.network.neutron [req-e9345b64-40ae-4099-9894-c476a6b5e00f req-323ae8b4-a907-4e9f-97bc-26e85c1deca4 service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.942212] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-102f9a15-e710-4954-b5d4-0f080f62fc0c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.956657] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c12bf095-4f3f-4c13-952a-85744fb43ebf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.989808] env[68217]: DEBUG nova.compute.manager [req-3ef4040e-7d3a-4d91-ac3c-86fe3d77e9f5 req-a62e4f0d-3bae-4c60-bc1f-3eea4f1e2c56 service nova] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Detach interface failed, port_id=ed3866d2-77b3-4f38-903b-76d8b6349fc8, reason: Instance 09a8469d-567c-4247-96eb-edf0f4040f65 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 941.139908] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "refresh_cache-35817c87-0c55-49bd-917a-59bd39de663c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 941.151017] env[68217]: INFO nova.compute.manager [-] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Took 1.46 seconds to deallocate network for instance. 
[ 941.217737] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.671s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.219847] env[68217]: DEBUG nova.network.neutron [-] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 941.221473] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.005s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.223707] env[68217]: INFO nova.compute.claims [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 941.228414] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5431241e-5831-49e0-b853-5ca351b80ef4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "04149a5c-d1b5-4d71-a1ca-44696506a40d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.641s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.247357] env[68217]: INFO nova.scheduler.client.report [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Deleted allocations for instance 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1 [ 941.253979] env[68217]: DEBUG oslo_vmware.api [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961600, 'name': PowerOffVM_Task, 'duration_secs': 0.2151} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.254442] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 941.254648] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 941.254851] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32713c87-adb9-4527-87a0-70ebf0f2c2dd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.273312] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961601, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.288143] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7b0b592e-d9f1-427d-b45e-aad8a380af42 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.297687] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79d62e9-8d57-41d3-be56-c305790e9538 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.330805] env[68217]: DEBUG nova.compute.manager [req-e9345b64-40ae-4099-9894-c476a6b5e00f req-323ae8b4-a907-4e9f-97bc-26e85c1deca4 service nova] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Detach interface failed, port_id=9e834f9f-3d2f-4316-81a7-13d8f8ccc3f2, reason: Instance 580e6909-7d05-447a-a378-f0b8b71f059a could not be found. 
{{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 941.332961] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 941.333179] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 941.333355] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Deleting the datastore file [datastore2] fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 941.333598] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5b33946e-249c-4adb-9ecd-278e453f7a80 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.339942] env[68217]: DEBUG oslo_vmware.api [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 941.339942] env[68217]: value = "task-2961603" [ 941.339942] env[68217]: _type = "Task" [ 941.339942] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.347325] env[68217]: DEBUG oslo_vmware.api [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961603, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.560320] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.563435] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 941.660925] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 941.728874] env[68217]: INFO nova.compute.manager [-] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Took 2.03 seconds to deallocate network for instance. [ 941.762878] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8cacd34b-8629-480d-aae0-a5d9f29c7fe7 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "7371d4d3-e255-4a1f-8d5f-2ee1297e89d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.858s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 941.772025] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961601, 'name': CreateSnapshot_Task, 'duration_secs': 0.813246} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.772025] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 941.772025] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff36cda9-301f-4872-858c-86e59d72fd84 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.851325] env[68217]: DEBUG oslo_vmware.api [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961603, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.160344} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.851682] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 941.851906] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 941.852178] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 941.852398] env[68217]: INFO nova.compute.manager [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Took 1.14 seconds to destroy the instance on the hypervisor. [ 941.852679] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 941.852941] env[68217]: DEBUG nova.compute.manager [-] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 941.853096] env[68217]: DEBUG nova.network.neutron [-] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 942.064346] env[68217]: DEBUG nova.compute.manager [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 942.237215] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.291521] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 942.294265] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-abd6f451-e7e1-4a66-9992-9993c1d109b5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.307185] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 942.307185] env[68217]: value = "task-2961605" [ 942.307185] env[68217]: _type = "Task" [ 942.307185] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.320107] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961605, 'name': CloneVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.467293] env[68217]: DEBUG nova.compute.manager [req-e6a1719f-b18c-4755-82bb-3e7bd3125de3 req-935ac6d3-db99-4d68-a2cd-0526e5af3c4d service nova] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Received event network-changed-772726db-4382-4051-9a7d-abfc670d5c9b {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 942.467524] env[68217]: DEBUG nova.compute.manager [req-e6a1719f-b18c-4755-82bb-3e7bd3125de3 req-935ac6d3-db99-4d68-a2cd-0526e5af3c4d service nova] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Refreshing instance network info cache due to event network-changed-772726db-4382-4051-9a7d-abfc670d5c9b. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 942.467693] env[68217]: DEBUG oslo_concurrency.lockutils [req-e6a1719f-b18c-4755-82bb-3e7bd3125de3 req-935ac6d3-db99-4d68-a2cd-0526e5af3c4d service nova] Acquiring lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.467822] env[68217]: DEBUG oslo_concurrency.lockutils [req-e6a1719f-b18c-4755-82bb-3e7bd3125de3 req-935ac6d3-db99-4d68-a2cd-0526e5af3c4d service nova] Acquired lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 942.467980] env[68217]: DEBUG nova.network.neutron [req-e6a1719f-b18c-4755-82bb-3e7bd3125de3 req-935ac6d3-db99-4d68-a2cd-0526e5af3c4d service nova] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Refreshing network info cache for port 772726db-4382-4051-9a7d-abfc670d5c9b {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 942.582344] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df32553c-1ca3-4cc8-9d6e-b296a4e3ec8b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.585606] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 942.590537] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-409f910d-4f46-45c4-8838-d252a52ed8fd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.622999] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f5a8d6-238b-4072-af17-c55fbbf3d746 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.630263] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6720d9f-3a43-49b2-87e6-dc119b14454e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.644331] env[68217]: DEBUG nova.compute.provider_tree [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 942.656009] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09eb954-6fa5-4c8c-8b56-8a4f0cb39eee {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.660457] env[68217]: DEBUG nova.network.neutron [-] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.676158] env[68217]: DEBUG 
nova.virt.vmwareapi.vmops [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Updating instance '35817c87-0c55-49bd-917a-59bd39de663c' progress to 0 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 942.813405] env[68217]: DEBUG nova.compute.manager [req-990653b6-4b25-41c3-811b-9c14cbc509b5 req-36af4c7a-7c47-474e-aa85-fb7a97b490da service nova] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Received event network-vif-deleted-747300c0-a758-483f-ba39-99efe6e731ec {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 942.819594] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961605, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.149015] env[68217]: DEBUG nova.scheduler.client.report [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 943.182218] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 943.182608] env[68217]: INFO nova.compute.manager [-] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Took 1.33 seconds to deallocate network for instance. [ 943.182908] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f5bf9341-94a0-46a4-9c23-782fcc1bcd01 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.196207] env[68217]: DEBUG oslo_vmware.api [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 943.196207] env[68217]: value = "task-2961606" [ 943.196207] env[68217]: _type = "Task" [ 943.196207] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.205152] env[68217]: DEBUG oslo_vmware.api [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961606, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.241682] env[68217]: DEBUG nova.network.neutron [req-e6a1719f-b18c-4755-82bb-3e7bd3125de3 req-935ac6d3-db99-4d68-a2cd-0526e5af3c4d service nova] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updated VIF entry in instance network info cache for port 772726db-4382-4051-9a7d-abfc670d5c9b. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 943.242064] env[68217]: DEBUG nova.network.neutron [req-e6a1719f-b18c-4755-82bb-3e7bd3125de3 req-935ac6d3-db99-4d68-a2cd-0526e5af3c4d service nova] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updating instance_info_cache with network_info: [{"id": "772726db-4382-4051-9a7d-abfc670d5c9b", "address": "fa:16:3e:49:ce:f8", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap772726db-43", "ovs_interfaceid": "772726db-4382-4051-9a7d-abfc670d5c9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.318855] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961605, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.657161] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.436s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 943.658222] env[68217]: DEBUG nova.compute.manager [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 943.660845] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.572s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 943.662252] env[68217]: INFO nova.compute.claims [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 943.692140] env[68217]: DEBUG oslo_concurrency.lockutils [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 943.706963] env[68217]: DEBUG oslo_vmware.api [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961606, 'name': PowerOffVM_Task, 'duration_secs': 0.215823} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.707237] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 943.707409] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Updating instance '35817c87-0c55-49bd-917a-59bd39de663c' progress to 17 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 943.745237] env[68217]: DEBUG oslo_concurrency.lockutils [req-e6a1719f-b18c-4755-82bb-3e7bd3125de3 req-935ac6d3-db99-4d68-a2cd-0526e5af3c4d service nova] Releasing lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 943.819709] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961605, 'name': CloneVM_Task, 'duration_secs': 1.353103} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.819976] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Created linked-clone VM from snapshot [ 943.820708] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea795af-608a-4e13-a451-2f5677e5ee2d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.828144] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Uploading image 16498e3d-bc2f-4eea-bb32-2fe6e2472b81 {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 943.852993] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 943.852993] env[68217]: value = "vm-594329" [ 943.852993] env[68217]: _type = "VirtualMachine" [ 943.852993] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 943.853296] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6cac404f-9b67-4ab2-b1a4-8c2d29e26cac {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.860930] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lease: (returnval){ [ 943.860930] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d546aa-59bd-d596-1da4-4356cc340e2a" [ 943.860930] env[68217]: _type = "HttpNfcLease" [ 943.860930] env[68217]: } obtained for exporting VM: (result){ [ 943.860930] env[68217]: value = "vm-594329" [ 943.860930] env[68217]: _type = "VirtualMachine" [ 943.860930] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 943.861191] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the lease: (returnval){ [ 943.861191] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d546aa-59bd-d596-1da4-4356cc340e2a" [ 943.861191] env[68217]: _type = "HttpNfcLease" [ 943.861191] env[68217]: } to be ready. {{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 943.867803] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 943.867803] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d546aa-59bd-d596-1da4-4356cc340e2a" [ 943.867803] env[68217]: _type = "HttpNfcLease" [ 943.867803] env[68217]: } is initializing. 
{{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 944.066995] env[68217]: DEBUG oslo_concurrency.lockutils [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquiring lock "7584180b-efa6-4038-9f3a-619ab7937553" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.067335] env[68217]: DEBUG oslo_concurrency.lockutils [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "7584180b-efa6-4038-9f3a-619ab7937553" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.067577] env[68217]: DEBUG oslo_concurrency.lockutils [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquiring lock "7584180b-efa6-4038-9f3a-619ab7937553-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 944.067785] env[68217]: DEBUG oslo_concurrency.lockutils [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "7584180b-efa6-4038-9f3a-619ab7937553-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 944.067924] env[68217]: DEBUG oslo_concurrency.lockutils [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "7584180b-efa6-4038-9f3a-619ab7937553-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 944.069935] env[68217]: INFO nova.compute.manager [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Terminating instance [ 944.166842] env[68217]: DEBUG nova.compute.utils [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 944.169993] env[68217]: DEBUG nova.compute.manager [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 944.170179] env[68217]: DEBUG nova.network.neutron [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 944.212633] env[68217]: DEBUG nova.policy [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a5f9c6fc51404b3491c5c3decd27aa7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '905b15e740ad4f879ba61518ba400680', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 944.215980] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 944.216248] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 944.216899] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 944.216899] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 944.216899] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 944.216899] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Chose sockets=0, cores=0, threads=0; 
limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 944.217123] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 944.217210] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 944.217375] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 944.217556] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 944.217696] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 944.223208] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee7706f4-334d-4610-99ff-4e5acef8cb2a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.240496] env[68217]: DEBUG oslo_vmware.api [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 944.240496] env[68217]: value = "task-2961608" [ 944.240496] env[68217]: _type = "Task" [ 944.240496] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.248920] env[68217]: DEBUG oslo_vmware.api [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961608, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.369845] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 944.369845] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d546aa-59bd-d596-1da4-4356cc340e2a" [ 944.369845] env[68217]: _type = "HttpNfcLease" [ 944.369845] env[68217]: } is ready. 
{{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 944.370164] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 944.370164] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d546aa-59bd-d596-1da4-4356cc340e2a" [ 944.370164] env[68217]: _type = "HttpNfcLease" [ 944.370164] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 944.370901] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1e443a4-ba27-48d7-a57d-cb4e521eb3ea {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.378420] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fa92bc-6740-0a95-5e52-9703b00490e6/disk-0.vmdk from lease info. {{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 944.378593] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fa92bc-6740-0a95-5e52-9703b00490e6/disk-0.vmdk for reading. {{(pid=68217) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 944.500393] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bd682611-aeb2-41ab-af2a-fe68484039f8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.573346] env[68217]: DEBUG nova.compute.manager [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 944.573553] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 944.574553] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d86e157d-2e4d-460b-a5a2-e605c7afe91d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.584024] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 944.584024] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-90631c53-7acd-4b55-a2c7-fc8eb38d409d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.587816] env[68217]: DEBUG oslo_vmware.api [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 944.587816] env[68217]: value = "task-2961609" [ 944.587816] env[68217]: _type = "Task" [ 944.587816] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.596032] env[68217]: DEBUG oslo_vmware.api [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961609, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.671547] env[68217]: DEBUG nova.compute.manager [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 944.754117] env[68217]: DEBUG oslo_vmware.api [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961608, 'name': ReconfigVM_Task, 'duration_secs': 0.292518} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.759235] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Updating instance '35817c87-0c55-49bd-917a-59bd39de663c' progress to 33 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 944.851015] env[68217]: DEBUG nova.network.neutron [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Successfully created port: f42ea21b-6ba7-4a8c-846e-852bfff623e1 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 945.100357] env[68217]: DEBUG oslo_vmware.api [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961609, 'name': PowerOffVM_Task, 'duration_secs': 0.204505} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.105041] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 945.105041] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 945.105041] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7e6a7613-c5e5-44ce-9c6d-73bf725afeaa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.119240] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c95c6bcc-f801-488c-b1cf-c96935e70d00 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.128889] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf70ad6-e736-4297-bc49-c492c5ecb0cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.164901] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17166906-3045-4dab-bc8b-b0b2d9c3f0ac {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.171472] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 
945.172065] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 945.172447] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Deleting the datastore file [datastore1] 7584180b-efa6-4038-9f3a-619ab7937553 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 945.175533] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-053bee71-f4c5-4b4e-8400-3a8e218aba27 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.178833] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22c53d45-dc8c-4bf4-baf9-758178912f5d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.189315] env[68217]: DEBUG oslo_vmware.api [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for the task: (returnval){ [ 945.189315] env[68217]: value = "task-2961611" [ 945.189315] env[68217]: _type = "Task" [ 945.189315] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.209160] env[68217]: DEBUG nova.compute.provider_tree [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 945.217439] env[68217]: DEBUG oslo_vmware.api [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961611, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.265210] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 945.265479] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 945.265631] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 945.265804] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 945.265957] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 945.266155] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 945.266529] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 945.267111] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 945.268714] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 
tempest-ServerDiskConfigTestJSON-253963987-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 945.268714] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 945.268714] env[68217]: DEBUG nova.virt.hardware [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 945.273278] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Reconfiguring VM instance instance-00000050 to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 945.274952] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-658525a8-b5cb-4da6-bf97-7775f1c37556 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.295337] env[68217]: DEBUG oslo_vmware.api [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 945.295337] env[68217]: value = "task-2961612" [ 945.295337] env[68217]: _type = "Task" [ 945.295337] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.304779] env[68217]: DEBUG oslo_vmware.api [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961612, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.343284] env[68217]: DEBUG nova.network.neutron [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Successfully created port: 4e400891-4a77-45f0-a314-6963b2ea95f0 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 945.395076] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.395076] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.413847] env[68217]: DEBUG oslo_concurrency.lockutils [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "da1524a7-2756-4429-ada2-b1f493544bd2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.414117] env[68217]: DEBUG oslo_concurrency.lockutils [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "da1524a7-2756-4429-ada2-b1f493544bd2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.414335] env[68217]: DEBUG oslo_concurrency.lockutils [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "da1524a7-2756-4429-ada2-b1f493544bd2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 945.414559] env[68217]: DEBUG oslo_concurrency.lockutils [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "da1524a7-2756-4429-ada2-b1f493544bd2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 945.414676] env[68217]: DEBUG oslo_concurrency.lockutils [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "da1524a7-2756-4429-ada2-b1f493544bd2-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 945.417286] env[68217]: INFO nova.compute.manager [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Terminating instance [ 945.679052] env[68217]: DEBUG nova.network.neutron [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Successfully created port: c5a3c187-8daf-4f2c-b103-d26c4f4b1792 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 945.691848] env[68217]: DEBUG nova.compute.manager [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 945.703679] env[68217]: DEBUG oslo_vmware.api [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Task: {'id': task-2961611, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.439201} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.704052] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 945.705313] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 945.705313] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 945.705313] env[68217]: INFO nova.compute.manager [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Took 1.13 seconds to destroy the instance on the hypervisor. [ 945.705313] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 945.705571] env[68217]: DEBUG nova.compute.manager [-] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 945.705610] env[68217]: DEBUG nova.network.neutron [-] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 945.713562] env[68217]: DEBUG nova.scheduler.client.report [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 945.720546] env[68217]: DEBUG nova.virt.hardware [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 945.720866] env[68217]: DEBUG nova.virt.hardware [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 945.721157] env[68217]: DEBUG nova.virt.hardware [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 945.721419] env[68217]: DEBUG nova.virt.hardware [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 945.721622] env[68217]: DEBUG nova.virt.hardware [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 945.721782] env[68217]: DEBUG nova.virt.hardware [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 945.722051] env[68217]: DEBUG nova.virt.hardware [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 945.722275] env[68217]: DEBUG nova.virt.hardware [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 945.722960] env[68217]: DEBUG nova.virt.hardware [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 945.722960] env[68217]: DEBUG nova.virt.hardware [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 945.723325] env[68217]: DEBUG nova.virt.hardware [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 945.724203] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de9c422a-47cd-4611-be8b-0a328c4821c8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.733701] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c039804-9126-4950-a9d8-94ed6cf4fbcc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.806072] env[68217]: DEBUG oslo_vmware.api [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961612, 'name': ReconfigVM_Task, 'duration_secs': 0.203375} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.806717] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Reconfigured VM instance instance-00000050 to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 945.807405] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7ca32b1-75e4-4f98-87f9-ad5b1dd08db7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.833549] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Reconfiguring VM instance instance-00000050 to attach disk [datastore1] 35817c87-0c55-49bd-917a-59bd39de663c/35817c87-0c55-49bd-917a-59bd39de663c.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 945.834252] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d05d35ed-7b5d-42af-a5ff-624d3f500486 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.853721] env[68217]: DEBUG oslo_vmware.api [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 945.853721] env[68217]: value = "task-2961613" [ 945.853721] env[68217]: _type = "Task" [ 945.853721] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.865489] env[68217]: DEBUG oslo_vmware.api [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961613, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.897714] env[68217]: DEBUG nova.compute.utils [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 945.921099] env[68217]: DEBUG nova.compute.manager [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 945.921411] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 945.922563] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1fc2ac-9aa1-45e6-be34-1ef2731da5c9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.931293] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 945.931643] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d24265f-4308-467a-8548-24bb4488f4fe {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.938682] env[68217]: DEBUG oslo_vmware.api [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 945.938682] env[68217]: value = "task-2961614" [ 945.938682] env[68217]: _type = "Task" [ 945.938682] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.947686] env[68217]: DEBUG oslo_vmware.api [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961614, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.219460] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.558s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.220337] env[68217]: DEBUG nova.compute.manager [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 946.224926] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.443s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 946.225326] env[68217]: DEBUG nova.objects.instance [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lazy-loading 'resources' on Instance uuid d3468ec2-6548-400a-b247-a6ab1156cab5 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 946.368922] env[68217]: DEBUG oslo_vmware.api [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961613, 'name': ReconfigVM_Task, 'duration_secs': 0.288415} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.368922] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Reconfigured VM instance instance-00000050 to attach disk [datastore1] 35817c87-0c55-49bd-917a-59bd39de663c/35817c87-0c55-49bd-917a-59bd39de663c.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 946.368922] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Updating instance '35817c87-0c55-49bd-917a-59bd39de663c' progress to 50 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 946.402906] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 946.453570] env[68217]: DEBUG oslo_vmware.api [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961614, 'name': PowerOffVM_Task, 'duration_secs': 0.269007} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.453570] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 946.453570] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 946.453570] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-504a8497-0d5a-4984-9038-e19ebb3ca02a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.522212] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 946.522452] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 946.522734] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Deleting the datastore file [datastore2] da1524a7-2756-4429-ada2-b1f493544bd2 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 946.523255] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b341ae0-f696-419c-a8b0-d286871d5af0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.530234] env[68217]: DEBUG oslo_vmware.api [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for the task: (returnval){ [ 946.530234] env[68217]: value = "task-2961616" [ 946.530234] env[68217]: _type = "Task" [ 946.530234] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.539143] env[68217]: DEBUG oslo_vmware.api [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961616, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.729351] env[68217]: DEBUG nova.compute.utils [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 946.734690] env[68217]: DEBUG nova.compute.manager [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 946.734940] env[68217]: DEBUG nova.network.neutron [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 946.794027] env[68217]: DEBUG nova.policy [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36276be4c67c4abfa0941293d4cc800b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ebfeb38b81794c558c1164cecd7fa221', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 946.873894] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-633348a0-58c4-43af-8cd9-eb7121cbcd5c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.905847] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e510a46-6cf9-4361-90f5-e66ca8deee21 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.930946] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Updating instance '35817c87-0c55-49bd-917a-59bd39de663c' progress to 67 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 946.961150] env[68217]: DEBUG nova.network.neutron [-] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.045729] env[68217]: DEBUG oslo_vmware.api [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Task: {'id': task-2961616, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.216001} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.046017] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 947.047182] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 947.047182] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 947.047182] env[68217]: INFO nova.compute.manager [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Took 1.13 seconds to destroy the instance on the hypervisor. [ 947.047182] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 947.047182] env[68217]: DEBUG nova.compute.manager [-] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 947.047182] env[68217]: DEBUG nova.network.neutron [-] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 947.224489] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdf4147f-d26a-4f8a-8968-45da3180fa67 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.232601] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75dc59c1-7c83-4d20-91fb-a0a8668110c0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.236329] env[68217]: DEBUG nova.compute.manager [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 947.269635] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aefd1a4f-a7ca-4f75-883f-3e76cb4a3a96 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.277059] env[68217]: DEBUG nova.compute.manager [req-911890bd-3058-4731-a3c1-96ae079b7c9b req-f73d0531-2497-421c-80fb-a475d386dd08 service nova] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Received event network-vif-deleted-5417460d-31c2-4462-b8aa-192085fc884f {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 947.277366] env[68217]: INFO nova.compute.manager [req-911890bd-3058-4731-a3c1-96ae079b7c9b req-f73d0531-2497-421c-80fb-a475d386dd08 service nova] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Neutron deleted interface 5417460d-31c2-4462-b8aa-192085fc884f; detaching it from the instance and deleting it from the info cache [ 947.277458] env[68217]: DEBUG nova.network.neutron [req-911890bd-3058-4731-a3c1-96ae079b7c9b req-f73d0531-2497-421c-80fb-a475d386dd08 service nova] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.282027] env[68217]: DEBUG nova.network.neutron [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Successfully created port: 13f84539-5bd2-4d90-9636-4109e055cb5e {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 947.287522] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-375a5400-1ed9-4c6c-8f86-9c2df354cd7c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.306100] env[68217]: DEBUG nova.compute.provider_tree [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 947.471553] env[68217]: INFO nova.compute.manager [-] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Took 1.77 seconds to deallocate network for instance. 
[ 947.520026] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.520026] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.520026] env[68217]: INFO nova.compute.manager [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Attaching volume 8c9d92b9-9da4-4b57-8504-b765266d4fee to /dev/sdb [ 947.569428] env[68217]: DEBUG nova.network.neutron [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Port 63b92bd9-8f95-481e-9ef4-468ea20dade1 binding to destination host cpu-1 is already ACTIVE {{(pid=68217) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 947.577156] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde14543-e190-4bc4-b7a4-2a7d193994de {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.586029] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b4d85c-19fc-45ed-a20c-1f52eb1efa13 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.600799] env[68217]: DEBUG nova.virt.block_device [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Updating existing volume attachment record: 8267f566-22f8-4800-93f2-3167a649c1a4 {{(pid=68217) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 947.693188] env[68217]: DEBUG nova.network.neutron [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Successfully updated port: f42ea21b-6ba7-4a8c-846e-852bfff623e1 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 947.779818] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f4dabd36-6cb4-4ffa-8e0c-448c3162409f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.790614] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55ce4cf3-d992-4ff3-bdce-d341eaec5a4d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.809447] env[68217]: DEBUG nova.scheduler.client.report [None 
req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 947.828112] env[68217]: DEBUG nova.compute.manager [req-911890bd-3058-4731-a3c1-96ae079b7c9b req-f73d0531-2497-421c-80fb-a475d386dd08 service nova] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Detach interface failed, port_id=5417460d-31c2-4462-b8aa-192085fc884f, reason: Instance 7584180b-efa6-4038-9f3a-619ab7937553 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 947.833831] env[68217]: DEBUG nova.compute.manager [req-d081bdbc-7b27-4b9b-a001-6b2d2501e56d req-18ffaf0f-db37-485f-9810-95315493fe34 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Received event network-vif-plugged-f42ea21b-6ba7-4a8c-846e-852bfff623e1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 947.833831] env[68217]: DEBUG oslo_concurrency.lockutils [req-d081bdbc-7b27-4b9b-a001-6b2d2501e56d req-18ffaf0f-db37-485f-9810-95315493fe34 service nova] Acquiring lock "1f99ace3-1c5b-46ce-bb9c-74e139519da7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 947.833831] env[68217]: DEBUG oslo_concurrency.lockutils [req-d081bdbc-7b27-4b9b-a001-6b2d2501e56d req-18ffaf0f-db37-485f-9810-95315493fe34 service nova] Lock "1f99ace3-1c5b-46ce-bb9c-74e139519da7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 947.833831] env[68217]: DEBUG oslo_concurrency.lockutils [req-d081bdbc-7b27-4b9b-a001-6b2d2501e56d req-18ffaf0f-db37-485f-9810-95315493fe34 service nova] Lock "1f99ace3-1c5b-46ce-bb9c-74e139519da7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 947.835470] env[68217]: DEBUG nova.compute.manager [req-d081bdbc-7b27-4b9b-a001-6b2d2501e56d req-18ffaf0f-db37-485f-9810-95315493fe34 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] No waiting events found dispatching network-vif-plugged-f42ea21b-6ba7-4a8c-846e-852bfff623e1 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 947.835892] env[68217]: WARNING nova.compute.manager [req-d081bdbc-7b27-4b9b-a001-6b2d2501e56d req-18ffaf0f-db37-485f-9810-95315493fe34 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Received unexpected event network-vif-plugged-f42ea21b-6ba7-4a8c-846e-852bfff623e1 for instance with vm_state building and task_state spawning. 
[ 947.842493] env[68217]: DEBUG nova.compute.manager [req-55f56301-a657-4740-acd1-4286324aff1b req-e81b4e83-3f61-440d-aa4c-024c486152f4 service nova] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Received event network-vif-deleted-396bcf29-b670-48a2-8e98-fb771af5eb13 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 947.842753] env[68217]: INFO nova.compute.manager [req-55f56301-a657-4740-acd1-4286324aff1b req-e81b4e83-3f61-440d-aa4c-024c486152f4 service nova] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Neutron deleted interface 396bcf29-b670-48a2-8e98-fb771af5eb13; detaching it from the instance and deleting it from the info cache [ 947.842986] env[68217]: DEBUG nova.network.neutron [req-55f56301-a657-4740-acd1-4286324aff1b req-e81b4e83-3f61-440d-aa4c-024c486152f4 service nova] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 947.978972] env[68217]: DEBUG oslo_concurrency.lockutils [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.080696] env[68217]: DEBUG nova.network.neutron [-] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.246080] env[68217]: DEBUG nova.compute.manager [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 948.275508] env[68217]: DEBUG nova.virt.hardware [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 948.275928] env[68217]: DEBUG nova.virt.hardware [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 948.276246] env[68217]: DEBUG nova.virt.hardware [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 948.276578] env[68217]: DEBUG nova.virt.hardware [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 948.276866] env[68217]: DEBUG nova.virt.hardware [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 948.277156] env[68217]: DEBUG nova.virt.hardware [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 948.277544] env[68217]: DEBUG nova.virt.hardware [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 948.277826] env[68217]: DEBUG nova.virt.hardware [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 948.278159] env[68217]: DEBUG nova.virt.hardware [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] 
Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 948.278474] env[68217]: DEBUG nova.virt.hardware [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 948.278791] env[68217]: DEBUG nova.virt.hardware [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 948.280277] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb0bade0-f459-49f0-abc9-27ece7a2e2a4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.292647] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae7fbcee-b2bd-426e-883d-518d12c00c45 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.316337] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.091s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.320101] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.679s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.320726] env[68217]: DEBUG nova.objects.instance [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Lazy-loading 'resources' on Instance uuid d0d8ed27-003e-43e2-8a07-041420a2c758 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 948.345353] env[68217]: INFO nova.scheduler.client.report [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Deleted allocations for instance d3468ec2-6548-400a-b247-a6ab1156cab5 [ 948.354359] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-290e086e-099d-4365-b7fb-7edd51c1192e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.365886] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47586e40-bffb-4b1f-93ee-82cb88475587 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.400855] env[68217]: DEBUG nova.compute.manager [req-55f56301-a657-4740-acd1-4286324aff1b req-e81b4e83-3f61-440d-aa4c-024c486152f4 service nova] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Detach interface failed, 
port_id=396bcf29-b670-48a2-8e98-fb771af5eb13, reason: Instance da1524a7-2756-4429-ada2-b1f493544bd2 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 948.586488] env[68217]: INFO nova.compute.manager [-] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Took 1.54 seconds to deallocate network for instance. [ 948.598921] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "35817c87-0c55-49bd-917a-59bd39de663c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 948.600513] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "35817c87-0c55-49bd-917a-59bd39de663c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 948.600513] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "35817c87-0c55-49bd-917a-59bd39de663c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 948.863296] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b9d55355-e3ac-4869-a5c5-1f210a3b5b91 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "d3468ec2-6548-400a-b247-a6ab1156cab5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.525s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.101753] env[68217]: DEBUG oslo_concurrency.lockutils [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.159098] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab6e4b61-0e39-46d1-9b89-b227893c908c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.166637] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb426459-0c01-47ac-963d-8857f7f3539a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.199985] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ac963b8-1358-4f00-b3e9-c4cca5e08f0c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.210104] env[68217]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13484aea-28a6-45ee-a7e3-49f99a89164a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.225870] env[68217]: DEBUG nova.compute.provider_tree [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 949.308949] env[68217]: DEBUG nova.compute.manager [req-b0f93bf2-a1a3-4d54-9a6e-2429448cee42 req-00d24082-4cc6-40e8-a957-79ce1dc396a1 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Received event network-changed-f42ea21b-6ba7-4a8c-846e-852bfff623e1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 949.309182] env[68217]: DEBUG nova.compute.manager [req-b0f93bf2-a1a3-4d54-9a6e-2429448cee42 req-00d24082-4cc6-40e8-a957-79ce1dc396a1 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Refreshing instance network info cache due to event network-changed-f42ea21b-6ba7-4a8c-846e-852bfff623e1. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 949.309410] env[68217]: DEBUG oslo_concurrency.lockutils [req-b0f93bf2-a1a3-4d54-9a6e-2429448cee42 req-00d24082-4cc6-40e8-a957-79ce1dc396a1 service nova] Acquiring lock "refresh_cache-1f99ace3-1c5b-46ce-bb9c-74e139519da7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.309622] env[68217]: DEBUG oslo_concurrency.lockutils [req-b0f93bf2-a1a3-4d54-9a6e-2429448cee42 req-00d24082-4cc6-40e8-a957-79ce1dc396a1 service nova] Acquired lock "refresh_cache-1f99ace3-1c5b-46ce-bb9c-74e139519da7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.309734] env[68217]: DEBUG nova.network.neutron [req-b0f93bf2-a1a3-4d54-9a6e-2429448cee42 req-00d24082-4cc6-40e8-a957-79ce1dc396a1 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Refreshing network info cache for port f42ea21b-6ba7-4a8c-846e-852bfff623e1 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 949.338299] env[68217]: DEBUG nova.network.neutron [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Successfully updated port: 13f84539-5bd2-4d90-9636-4109e055cb5e {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 949.641032] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "refresh_cache-35817c87-0c55-49bd-917a-59bd39de663c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.641156] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "refresh_cache-35817c87-0c55-49bd-917a-59bd39de663c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.641336] env[68217]: DEBUG nova.network.neutron [None 
req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 949.729120] env[68217]: DEBUG nova.scheduler.client.report [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 949.842083] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "refresh_cache-4a555172-a2a3-410b-a0fe-38964cee9a22" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 949.842083] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired lock "refresh_cache-4a555172-a2a3-410b-a0fe-38964cee9a22" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 949.842083] env[68217]: DEBUG nova.network.neutron [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 949.859045] env[68217]: DEBUG nova.network.neutron [req-b0f93bf2-a1a3-4d54-9a6e-2429448cee42 req-00d24082-4cc6-40e8-a957-79ce1dc396a1 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 949.919535] env[68217]: DEBUG nova.compute.manager [req-90978422-3164-4ea9-b500-d94da763bd7e req-50ba1f49-943c-4fc5-8054-eb4d6a3a72f4 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Received event network-vif-plugged-4e400891-4a77-45f0-a314-6963b2ea95f0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 949.919810] env[68217]: DEBUG oslo_concurrency.lockutils [req-90978422-3164-4ea9-b500-d94da763bd7e req-50ba1f49-943c-4fc5-8054-eb4d6a3a72f4 service nova] Acquiring lock "1f99ace3-1c5b-46ce-bb9c-74e139519da7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 949.919966] env[68217]: DEBUG oslo_concurrency.lockutils [req-90978422-3164-4ea9-b500-d94da763bd7e req-50ba1f49-943c-4fc5-8054-eb4d6a3a72f4 service nova] Lock "1f99ace3-1c5b-46ce-bb9c-74e139519da7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 949.920145] env[68217]: DEBUG oslo_concurrency.lockutils [req-90978422-3164-4ea9-b500-d94da763bd7e req-50ba1f49-943c-4fc5-8054-eb4d6a3a72f4 service nova] Lock "1f99ace3-1c5b-46ce-bb9c-74e139519da7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 949.920401] env[68217]: DEBUG nova.compute.manager [req-90978422-3164-4ea9-b500-d94da763bd7e req-50ba1f49-943c-4fc5-8054-eb4d6a3a72f4 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] No waiting events found dispatching network-vif-plugged-4e400891-4a77-45f0-a314-6963b2ea95f0 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 949.920475] env[68217]: WARNING nova.compute.manager [req-90978422-3164-4ea9-b500-d94da763bd7e req-50ba1f49-943c-4fc5-8054-eb4d6a3a72f4 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Received unexpected event network-vif-plugged-4e400891-4a77-45f0-a314-6963b2ea95f0 for instance with vm_state building and task_state spawning. 
[ 949.996478] env[68217]: DEBUG nova.network.neutron [req-b0f93bf2-a1a3-4d54-9a6e-2429448cee42 req-00d24082-4cc6-40e8-a957-79ce1dc396a1 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.237168] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.915s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.238999] env[68217]: DEBUG oslo_concurrency.lockutils [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.349s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.240137] env[68217]: DEBUG nova.objects.instance [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68217) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 950.249931] env[68217]: DEBUG nova.network.neutron [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Successfully updated port: 4e400891-4a77-45f0-a314-6963b2ea95f0 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 950.285988] env[68217]: INFO nova.scheduler.client.report [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Deleted allocations for instance d0d8ed27-003e-43e2-8a07-041420a2c758 [ 950.398388] env[68217]: DEBUG nova.network.neutron [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 950.500956] env[68217]: DEBUG oslo_concurrency.lockutils [req-b0f93bf2-a1a3-4d54-9a6e-2429448cee42 req-00d24082-4cc6-40e8-a957-79ce1dc396a1 service nova] Releasing lock "refresh_cache-1f99ace3-1c5b-46ce-bb9c-74e139519da7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 950.501275] env[68217]: DEBUG nova.compute.manager [req-b0f93bf2-a1a3-4d54-9a6e-2429448cee42 req-00d24082-4cc6-40e8-a957-79ce1dc396a1 service nova] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Received event network-vif-plugged-13f84539-5bd2-4d90-9636-4109e055cb5e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 950.501476] env[68217]: DEBUG oslo_concurrency.lockutils [req-b0f93bf2-a1a3-4d54-9a6e-2429448cee42 req-00d24082-4cc6-40e8-a957-79ce1dc396a1 service nova] Acquiring lock "4a555172-a2a3-410b-a0fe-38964cee9a22-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 950.501754] env[68217]: DEBUG oslo_concurrency.lockutils [req-b0f93bf2-a1a3-4d54-9a6e-2429448cee42 req-00d24082-4cc6-40e8-a957-79ce1dc396a1 service nova] Lock "4a555172-a2a3-410b-a0fe-38964cee9a22-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 950.501920] env[68217]: DEBUG oslo_concurrency.lockutils [req-b0f93bf2-a1a3-4d54-9a6e-2429448cee42 req-00d24082-4cc6-40e8-a957-79ce1dc396a1 service nova] Lock "4a555172-a2a3-410b-a0fe-38964cee9a22-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 950.502144] env[68217]: DEBUG nova.compute.manager [req-b0f93bf2-a1a3-4d54-9a6e-2429448cee42 req-00d24082-4cc6-40e8-a957-79ce1dc396a1 service nova] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] No waiting events found dispatching network-vif-plugged-13f84539-5bd2-4d90-9636-4109e055cb5e {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 950.502344] env[68217]: WARNING nova.compute.manager [req-b0f93bf2-a1a3-4d54-9a6e-2429448cee42 req-00d24082-4cc6-40e8-a957-79ce1dc396a1 service nova] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Received unexpected event network-vif-plugged-13f84539-5bd2-4d90-9636-4109e055cb5e for instance with vm_state building and task_state spawning. 
[ 950.527525] env[68217]: DEBUG nova.network.neutron [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Updating instance_info_cache with network_info: [{"id": "63b92bd9-8f95-481e-9ef4-468ea20dade1", "address": "fa:16:3e:e8:e5:1d", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b92bd9-8f", "ovs_interfaceid": "63b92bd9-8f95-481e-9ef4-468ea20dade1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.587472] env[68217]: DEBUG nova.network.neutron [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Updating instance_info_cache with network_info: [{"id": "13f84539-5bd2-4d90-9636-4109e055cb5e", "address": "fa:16:3e:d3:31:ab", "network": {"id": "9e8fc690-e99b-4d81-a587-00584139fd8d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1676314619-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebfeb38b81794c558c1164cecd7fa221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13f84539-5b", "ovs_interfaceid": "13f84539-5bd2-4d90-9636-4109e055cb5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.793723] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ee931ff1-dc7b-4ea0-aa00-e05c5a2e2d4e tempest-ServersTestJSON-300695206 tempest-ServersTestJSON-300695206-project-member] Lock "d0d8ed27-003e-43e2-8a07-041420a2c758" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 23.757s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.030944] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "refresh_cache-35817c87-0c55-49bd-917a-59bd39de663c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 951.090401] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Releasing lock "refresh_cache-4a555172-a2a3-410b-a0fe-38964cee9a22" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 951.090846] env[68217]: DEBUG nova.compute.manager [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Instance network_info: |[{"id": "13f84539-5bd2-4d90-9636-4109e055cb5e", "address": "fa:16:3e:d3:31:ab", "network": {"id": "9e8fc690-e99b-4d81-a587-00584139fd8d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1676314619-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebfeb38b81794c558c1164cecd7fa221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13f84539-5b", "ovs_interfaceid": "13f84539-5bd2-4d90-9636-4109e055cb5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 951.091370] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d3:31:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5a629f-6902-4d30-9278-74b443a8371d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '13f84539-5bd2-4d90-9636-4109e055cb5e', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 951.099632] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 951.100217] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 951.100464] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-80e1f22d-b020-42ba-9d5b-78cdaac2a1f8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.122394] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 951.122394] env[68217]: value = "task-2961621" [ 951.122394] env[68217]: _type = "Task" [ 951.122394] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.132669] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961621, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.252760] env[68217]: DEBUG oslo_concurrency.lockutils [None req-13eabf03-f64e-40f3-8bea-fb056d4b6ae6 tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.014s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 951.253917] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 17.213s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 951.564572] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84f149c-4177-4665-8802-d9a1416f4c89 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.590058] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-925b6de7-d3cf-4aca-81da-9634aaa9f141 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.597337] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Updating instance '35817c87-0c55-49bd-917a-59bd39de663c' progress to 83 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 951.617465] env[68217]: DEBUG nova.compute.manager [req-e71bd8b3-e743-449c-b750-63f29544dc09 req-521d4fed-59a2-40fb-8911-9ef0671ab145 service nova] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Received event network-changed-13f84539-5bd2-4d90-9636-4109e055cb5e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 951.617465] env[68217]: DEBUG nova.compute.manager [req-e71bd8b3-e743-449c-b750-63f29544dc09 req-521d4fed-59a2-40fb-8911-9ef0671ab145 service nova] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Refreshing instance network info cache due to event network-changed-13f84539-5bd2-4d90-9636-4109e055cb5e. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 951.617790] env[68217]: DEBUG oslo_concurrency.lockutils [req-e71bd8b3-e743-449c-b750-63f29544dc09 req-521d4fed-59a2-40fb-8911-9ef0671ab145 service nova] Acquiring lock "refresh_cache-4a555172-a2a3-410b-a0fe-38964cee9a22" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.617907] env[68217]: DEBUG oslo_concurrency.lockutils [req-e71bd8b3-e743-449c-b750-63f29544dc09 req-521d4fed-59a2-40fb-8911-9ef0671ab145 service nova] Acquired lock "refresh_cache-4a555172-a2a3-410b-a0fe-38964cee9a22" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.618176] env[68217]: DEBUG nova.network.neutron [req-e71bd8b3-e743-449c-b750-63f29544dc09 req-521d4fed-59a2-40fb-8911-9ef0671ab145 service nova] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Refreshing network info cache for port 13f84539-5bd2-4d90-9636-4109e055cb5e {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 951.634088] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961621, 'name': CreateVM_Task, 'duration_secs': 0.325092} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.635146] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 951.636032] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.636032] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 951.636297] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 951.637365] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dbdeda16-0f29-4cf0-87d8-6b273440df27 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.642556] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 951.642556] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ad8238-efb8-dfd2-a177-0ed77d373e00" [ 951.642556] env[68217]: _type = "Task" [ 951.642556] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.652331] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ad8238-efb8-dfd2-a177-0ed77d373e00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.069222] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fa92bc-6740-0a95-5e52-9703b00490e6/disk-0.vmdk. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 952.070194] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c0bf910-c945-4c80-9212-747aeff92687 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.077478] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fa92bc-6740-0a95-5e52-9703b00490e6/disk-0.vmdk is in state: ready. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 952.077709] env[68217]: ERROR oslo_vmware.rw_handles [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fa92bc-6740-0a95-5e52-9703b00490e6/disk-0.vmdk due to incomplete transfer. [ 952.077877] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-890b717d-de61-421a-848a-0b8793caa7df {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.085363] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/52fa92bc-6740-0a95-5e52-9703b00490e6/disk-0.vmdk. 
{{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 952.086042] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Uploaded image 16498e3d-bc2f-4eea-bb32-2fe6e2472b81 to the Glance image server {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 952.087782] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 952.088033] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-739f9e08-e663-45fc-9117-b51fba525fc7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.095509] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 952.095509] env[68217]: value = "task-2961622" [ 952.095509] env[68217]: _type = "Task" [ 952.095509] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.104728] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 952.105016] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961622, 'name': Destroy_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.105258] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-74fa5367-7725-403f-b955-f31c1c4e60f8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.112193] env[68217]: DEBUG oslo_vmware.api [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 952.112193] env[68217]: value = "task-2961623" [ 952.112193] env[68217]: _type = "Task" [ 952.112193] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.125864] env[68217]: DEBUG oslo_vmware.api [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961623, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.153400] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ad8238-efb8-dfd2-a177-0ed77d373e00, 'name': SearchDatastore_Task, 'duration_secs': 0.009543} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.153719] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.154254] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 952.154533] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.154688] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.154932] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 952.155277] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-11ca4455-1eee-4a8b-9d11-a94d8f4ee8c7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.164527] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 952.164730] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 952.165621] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4be2caf4-23f7-4a34-b088-8968567a0c7a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.171464] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 952.171464] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]526c8236-dbf3-554b-71a4-263c138223d5" [ 952.171464] env[68217]: _type = "Task" [ 952.171464] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.179656] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526c8236-dbf3-554b-71a4-263c138223d5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.278358] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Applying migration context for instance 35817c87-0c55-49bd-917a-59bd39de663c as it has an incoming, in-progress migration 491f93e8-1987-44d6-adf9-567206333bb4. Migration status is post-migrating {{(pid=68217) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 952.281915] env[68217]: INFO nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Updating resource usage from migration 491f93e8-1987-44d6-adf9-567206333bb4 [ 952.307627] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 7056fb29-2a2f-4275-a411-4d5f3fcb421f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.307823] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance da1524a7-2756-4429-ada2-b1f493544bd2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 952.308067] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 580e6909-7d05-447a-a378-f0b8b71f059a is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 952.308166] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 0552d616-a406-4dfa-8a70-82f39fb98bbc is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 952.308220] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance d14026b1-84dd-430e-be94-94dcb1f47473 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.308327] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 7584180b-efa6-4038-9f3a-619ab7937553 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 2, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 952.308465] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.308640] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance a86015ea-fa6b-4cf8-9d79-273ffa02ec23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.308700] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance aa4b9cc8-d0dc-4a0b-9eec-dceace695df9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.309140] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.309140] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance bd62c682-24f2-4559-887a-03186409f699 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.309140] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 09a8469d-567c-4247-96eb-edf0f4040f65 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 952.309140] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 3af571ce-c400-45a1-97ad-4fbd53395129 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.309344] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 952.309422] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 4366c94c-164d-4cb9-8f04-7f26db4c0d3c is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 952.309523] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 213b720b-b782-41c4-b60d-ef0af4b62932 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 952.309639] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance fab7d1eb-ef05-4498-aa6d-a524c3bb59c8 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 952.309753] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 04149a5c-d1b5-4d71-a1ca-44696506a40d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.309894] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Migration 491f93e8-1987-44d6-adf9-567206333bb4 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 952.310047] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 35817c87-0c55-49bd-917a-59bd39de663c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.310158] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 1f99ace3-1c5b-46ce-bb9c-74e139519da7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.310279] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 4a555172-a2a3-410b-a0fe-38964cee9a22 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 952.366704] env[68217]: DEBUG nova.network.neutron [req-e71bd8b3-e743-449c-b750-63f29544dc09 req-521d4fed-59a2-40fb-8911-9ef0671ab145 service nova] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Updated VIF entry in instance network info cache for port 13f84539-5bd2-4d90-9636-4109e055cb5e. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 952.367062] env[68217]: DEBUG nova.network.neutron [req-e71bd8b3-e743-449c-b750-63f29544dc09 req-521d4fed-59a2-40fb-8911-9ef0671ab145 service nova] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Updating instance_info_cache with network_info: [{"id": "13f84539-5bd2-4d90-9636-4109e055cb5e", "address": "fa:16:3e:d3:31:ab", "network": {"id": "9e8fc690-e99b-4d81-a587-00584139fd8d", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1676314619-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ebfeb38b81794c558c1164cecd7fa221", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap13f84539-5b", "ovs_interfaceid": "13f84539-5bd2-4d90-9636-4109e055cb5e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.608585] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961622, 'name': Destroy_Task, 'duration_secs': 0.323989} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.609487] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Destroyed the VM [ 952.612015] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 952.612015] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4bb9ddb6-112e-4852-9363-f4a6a7a4b019 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.618862] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 952.618862] env[68217]: value = "task-2961624" [ 952.618862] env[68217]: _type = "Task" [ 952.618862] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.628033] env[68217]: DEBUG oslo_vmware.api [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961623, 'name': PowerOnVM_Task, 'duration_secs': 0.395618} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.628815] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 952.629510] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-52fac6bc-618b-427a-8967-a17916b0b506 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Updating instance '35817c87-0c55-49bd-917a-59bd39de663c' progress to 100 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 952.649126] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961624, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.659299] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Volume attach. 
Driver type: vmdk {{(pid=68217) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 952.660024] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594331', 'volume_id': '8c9d92b9-9da4-4b57-8504-b765266d4fee', 'name': 'volume-8c9d92b9-9da4-4b57-8504-b765266d4fee', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f', 'attached_at': '', 'detached_at': '', 'volume_id': '8c9d92b9-9da4-4b57-8504-b765266d4fee', 'serial': '8c9d92b9-9da4-4b57-8504-b765266d4fee'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 952.662193] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af62911a-e34c-4eb3-b4db-d2a7a8775482 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.685339] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a00c4dc-a255-4786-b980-9afba4e6f569 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.693967] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526c8236-dbf3-554b-71a4-263c138223d5, 'name': SearchDatastore_Task, 'duration_secs': 0.008954} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.716582] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Reconfiguring VM instance instance-00000030 to attach disk [datastore2] volume-8c9d92b9-9da4-4b57-8504-b765266d4fee/volume-8c9d92b9-9da4-4b57-8504-b765266d4fee.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 952.717759] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-066e981e-e349-4725-94e8-7c4e579e46b5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.720327] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a835b6f0-b509-4465-9871-1d875f1d1bd8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.736814] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 952.736814] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524445d8-bfc7-e1c1-5cb4-6cca4694913d" [ 952.736814] env[68217]: _type = "Task" [ 952.736814] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.738249] env[68217]: DEBUG oslo_vmware.api [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 952.738249] env[68217]: value = "task-2961625" [ 952.738249] env[68217]: _type = "Task" [ 952.738249] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.750572] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524445d8-bfc7-e1c1-5cb4-6cca4694913d, 'name': SearchDatastore_Task, 'duration_secs': 0.009154} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.753778] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.754060] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 4a555172-a2a3-410b-a0fe-38964cee9a22/4a555172-a2a3-410b-a0fe-38964cee9a22.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 952.754326] env[68217]: DEBUG oslo_vmware.api [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961625, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.754525] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f5c8f30-4799-4241-8070-e37d4c262cd0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.762444] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 952.762444] env[68217]: value = "task-2961626" [ 952.762444] env[68217]: _type = "Task" [ 952.762444] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.771101] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961626, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.813809] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 776798bf-1ad4-4acb-ac58-cacc5493e1c7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 952.814123] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Total usable vcpus: 48, total allocated vcpus: 13 {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 952.814289] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3136MB phys_disk=200GB used_disk=13GB total_vcpus=48 used_vcpus=13 pci_stats=[] {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 952.869838] env[68217]: DEBUG oslo_concurrency.lockutils [req-e71bd8b3-e743-449c-b750-63f29544dc09 req-521d4fed-59a2-40fb-8911-9ef0671ab145 service nova] Releasing lock "refresh_cache-4a555172-a2a3-410b-a0fe-38964cee9a22" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 952.870183] env[68217]: DEBUG nova.compute.manager [req-e71bd8b3-e743-449c-b750-63f29544dc09 req-521d4fed-59a2-40fb-8911-9ef0671ab145 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Received event network-changed-4e400891-4a77-45f0-a314-6963b2ea95f0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 952.870343] env[68217]: DEBUG nova.compute.manager [req-e71bd8b3-e743-449c-b750-63f29544dc09 req-521d4fed-59a2-40fb-8911-9ef0671ab145 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Refreshing instance network info cache due to event network-changed-4e400891-4a77-45f0-a314-6963b2ea95f0. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 952.872664] env[68217]: DEBUG oslo_concurrency.lockutils [req-e71bd8b3-e743-449c-b750-63f29544dc09 req-521d4fed-59a2-40fb-8911-9ef0671ab145 service nova] Acquiring lock "refresh_cache-1f99ace3-1c5b-46ce-bb9c-74e139519da7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 952.872664] env[68217]: DEBUG oslo_concurrency.lockutils [req-e71bd8b3-e743-449c-b750-63f29544dc09 req-521d4fed-59a2-40fb-8911-9ef0671ab145 service nova] Acquired lock "refresh_cache-1f99ace3-1c5b-46ce-bb9c-74e139519da7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 952.872664] env[68217]: DEBUG nova.network.neutron [req-e71bd8b3-e743-449c-b750-63f29544dc09 req-521d4fed-59a2-40fb-8911-9ef0671ab145 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Refreshing network info cache for port 4e400891-4a77-45f0-a314-6963b2ea95f0 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 952.882080] env[68217]: DEBUG nova.network.neutron [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Successfully updated port: c5a3c187-8daf-4f2c-b103-d26c4f4b1792 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 953.135491] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961624, 'name': RemoveSnapshot_Task, 'duration_secs': 0.394154} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.135858] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 953.136187] env[68217]: DEBUG nova.compute.manager [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 953.137158] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8a63ba9-edf5-4388-9713-ccca7a231672 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.217438] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1849a76-fb18-4bfd-856f-43e82499365f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.228530] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9f01b1a-8e7b-4041-b901-7c39db523106 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.264975] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-752c8c88-ef34-4135-b85d-c25fe9cb5d02 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.282014] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961626, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.487279} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.282324] env[68217]: DEBUG oslo_vmware.api [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961625, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.282699] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 4a555172-a2a3-410b-a0fe-38964cee9a22/4a555172-a2a3-410b-a0fe-38964cee9a22.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 953.282957] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 953.284239] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2db67f4b-fd17-473b-92e2-1ead5c07f167 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.289977] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ca3bfc5e-2027-4970-8782-3384070cf52e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.304266] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 953.307193] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 953.307193] env[68217]: value = "task-2961627" [ 953.307193] env[68217]: _type = "Task" [ 953.307193] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.315173] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961627, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.384282] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquiring lock "refresh_cache-1f99ace3-1c5b-46ce-bb9c-74e139519da7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.409855] env[68217]: DEBUG nova.network.neutron [req-e71bd8b3-e743-449c-b750-63f29544dc09 req-521d4fed-59a2-40fb-8911-9ef0671ab145 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 953.505881] env[68217]: DEBUG nova.network.neutron [req-e71bd8b3-e743-449c-b750-63f29544dc09 req-521d4fed-59a2-40fb-8911-9ef0671ab145 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 953.640496] env[68217]: DEBUG nova.compute.manager [req-24db96a8-05c6-42fa-9149-aaa7cf5f29ed req-3e2e30dd-26eb-4e67-856c-554a27e8c68e service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Received event network-vif-plugged-c5a3c187-8daf-4f2c-b103-d26c4f4b1792 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 953.640715] env[68217]: DEBUG oslo_concurrency.lockutils [req-24db96a8-05c6-42fa-9149-aaa7cf5f29ed req-3e2e30dd-26eb-4e67-856c-554a27e8c68e service nova] Acquiring lock "1f99ace3-1c5b-46ce-bb9c-74e139519da7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 953.641335] env[68217]: DEBUG oslo_concurrency.lockutils [req-24db96a8-05c6-42fa-9149-aaa7cf5f29ed req-3e2e30dd-26eb-4e67-856c-554a27e8c68e service nova] Lock "1f99ace3-1c5b-46ce-bb9c-74e139519da7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 953.641577] env[68217]: DEBUG oslo_concurrency.lockutils [req-24db96a8-05c6-42fa-9149-aaa7cf5f29ed req-3e2e30dd-26eb-4e67-856c-554a27e8c68e service nova] Lock "1f99ace3-1c5b-46ce-bb9c-74e139519da7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 953.641696] env[68217]: DEBUG nova.compute.manager [req-24db96a8-05c6-42fa-9149-aaa7cf5f29ed req-3e2e30dd-26eb-4e67-856c-554a27e8c68e service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] No waiting events found dispatching network-vif-plugged-c5a3c187-8daf-4f2c-b103-d26c4f4b1792 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 953.641850] env[68217]: WARNING nova.compute.manager [req-24db96a8-05c6-42fa-9149-aaa7cf5f29ed req-3e2e30dd-26eb-4e67-856c-554a27e8c68e service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Received unexpected event network-vif-plugged-c5a3c187-8daf-4f2c-b103-d26c4f4b1792 for instance with vm_state building and task_state spawning. [ 953.641994] env[68217]: DEBUG nova.compute.manager [req-24db96a8-05c6-42fa-9149-aaa7cf5f29ed req-3e2e30dd-26eb-4e67-856c-554a27e8c68e service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Received event network-changed-c5a3c187-8daf-4f2c-b103-d26c4f4b1792 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 953.642210] env[68217]: DEBUG nova.compute.manager [req-24db96a8-05c6-42fa-9149-aaa7cf5f29ed req-3e2e30dd-26eb-4e67-856c-554a27e8c68e service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Refreshing instance network info cache due to event network-changed-c5a3c187-8daf-4f2c-b103-d26c4f4b1792. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 953.642393] env[68217]: DEBUG oslo_concurrency.lockutils [req-24db96a8-05c6-42fa-9149-aaa7cf5f29ed req-3e2e30dd-26eb-4e67-856c-554a27e8c68e service nova] Acquiring lock "refresh_cache-1f99ace3-1c5b-46ce-bb9c-74e139519da7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.652356] env[68217]: INFO nova.compute.manager [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Shelve offloading [ 953.771730] env[68217]: DEBUG oslo_vmware.api [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961625, 'name': ReconfigVM_Task, 'duration_secs': 0.653984} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.772023] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Reconfigured VM instance instance-00000030 to attach disk [datastore2] volume-8c9d92b9-9da4-4b57-8504-b765266d4fee/volume-8c9d92b9-9da4-4b57-8504-b765266d4fee.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 953.777033] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e4306c8-ca86-4994-8eb7-57123deb299e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.791953] env[68217]: DEBUG oslo_vmware.api [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 953.791953] env[68217]: value = "task-2961628" [ 953.791953] env[68217]: _type = "Task" [ 953.791953] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.800089] env[68217]: DEBUG oslo_vmware.api [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961628, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.809088] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 953.822081] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961627, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071953} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.822372] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 953.823558] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9bc774c-7b6e-4d0d-951d-e27e1f11ebd9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.848102] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] 4a555172-a2a3-410b-a0fe-38964cee9a22/4a555172-a2a3-410b-a0fe-38964cee9a22.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 953.848459] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e71bf005-9f4f-42e0-8c60-4f601feeca27 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.870607] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 953.870607] env[68217]: value = "task-2961629" [ 953.870607] env[68217]: _type = "Task" [ 953.870607] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.879409] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961629, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.010370] env[68217]: DEBUG oslo_concurrency.lockutils [req-e71bd8b3-e743-449c-b750-63f29544dc09 req-521d4fed-59a2-40fb-8911-9ef0671ab145 service nova] Releasing lock "refresh_cache-1f99ace3-1c5b-46ce-bb9c-74e139519da7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 954.010829] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquired lock "refresh_cache-1f99ace3-1c5b-46ce-bb9c-74e139519da7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.011007] env[68217]: DEBUG nova.network.neutron [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 954.156335] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 954.156686] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-de82ef9b-9921-4f70-bd7b-1a6766c1a157 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.170782] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 954.170782] env[68217]: value = "task-2961630" [ 954.170782] env[68217]: _type = "Task" [ 954.170782] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.180726] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] VM already powered off {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 954.180945] env[68217]: DEBUG nova.compute.manager [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 954.181757] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf995ab9-e683-49a0-8b0b-b519b9fc4188 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.188365] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.188365] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquired lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 954.188365] env[68217]: DEBUG nova.network.neutron [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 954.302282] env[68217]: DEBUG oslo_vmware.api [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961628, 'name': ReconfigVM_Task, 'duration_secs': 0.215996} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.302612] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594331', 'volume_id': '8c9d92b9-9da4-4b57-8504-b765266d4fee', 'name': 'volume-8c9d92b9-9da4-4b57-8504-b765266d4fee', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f', 'attached_at': '', 'detached_at': '', 'volume_id': '8c9d92b9-9da4-4b57-8504-b765266d4fee', 'serial': '8c9d92b9-9da4-4b57-8504-b765266d4fee'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 954.318081] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68217) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 954.318312] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.064s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.318808] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 17.137s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.318995] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.321132] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.619s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.321327] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.323743] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.655s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.323743] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.325428] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.157s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.325699] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.327323] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.667s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.327607] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.329163] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.092s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.329380] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.331357] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.746s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.332991] env[68217]: INFO nova.compute.claims [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 954.358958] env[68217]: INFO nova.scheduler.client.report [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Deleted allocations for instance 0552d616-a406-4dfa-8a70-82f39fb98bbc [ 954.368726] env[68217]: INFO nova.scheduler.client.report [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Deleted allocations for instance 4366c94c-164d-4cb9-8f04-7f26db4c0d3c [ 954.386990] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961629, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.389558] env[68217]: INFO nova.scheduler.client.report [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleted allocations for instance 213b720b-b782-41c4-b60d-ef0af4b62932 [ 954.395561] env[68217]: INFO nova.scheduler.client.report [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Deleted allocations for instance 580e6909-7d05-447a-a378-f0b8b71f059a [ 954.405172] env[68217]: INFO nova.scheduler.client.report [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Deleted allocations for instance 09a8469d-567c-4247-96eb-edf0f4040f65 [ 954.412638] env[68217]: INFO nova.scheduler.client.report [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleted allocations for instance fab7d1eb-ef05-4498-aa6d-a524c3bb59c8 [ 954.572860] env[68217]: DEBUG nova.network.neutron [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 954.874476] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d1aea037-6f59-4d2b-891a-c24d934281cb tempest-ServersTestBootFromVolume-268139603 tempest-ServersTestBootFromVolume-268139603-project-member] Lock "0552d616-a406-4dfa-8a70-82f39fb98bbc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.536s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.885452] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4648b2e1-d2d9-4e20-bca7-039aa2dae04d tempest-ServerShowV254Test-596986300 tempest-ServerShowV254Test-596986300-project-member] Lock "4366c94c-164d-4cb9-8f04-7f26db4c0d3c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 20.680s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.891048] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961629, 'name': ReconfigVM_Task, 'duration_secs': 0.557554} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.891503] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Reconfigured VM instance instance-00000056 to attach disk [datastore2] 4a555172-a2a3-410b-a0fe-38964cee9a22/4a555172-a2a3-410b-a0fe-38964cee9a22.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 954.892215] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-04ae93ac-0cdc-463d-bee8-c9d84d769b03 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.905262] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 954.905262] env[68217]: value = "task-2961631" [ 954.905262] env[68217]: _type = "Task" [ 954.905262] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.905667] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d834ced1-9f90-480d-82e0-8f545cd8a9ce tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "580e6909-7d05-447a-a378-f0b8b71f059a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.046s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.911102] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62ff3193-d53c-4e02-b457-274e575309bd tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "213b720b-b782-41c4-b60d-ef0af4b62932" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.169s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.915983] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4314d2fb-b5b0-4259-a184-f10ac0a6ce01 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "09a8469d-567c-4247-96eb-edf0f4040f65" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.273s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.921412] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961631, 'name': Rename_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.921955] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0ea2b097-c83f-4cf9-be50-5e12237c6353 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "fab7d1eb-ef05-4498-aa6d-a524c3bb59c8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 18.706s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 954.978743] env[68217]: DEBUG oslo_concurrency.lockutils [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "35817c87-0c55-49bd-917a-59bd39de663c" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 954.979069] env[68217]: DEBUG oslo_concurrency.lockutils [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "35817c87-0c55-49bd-917a-59bd39de663c" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 954.979548] env[68217]: DEBUG nova.compute.manager [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Going to confirm migration 4 {{(pid=68217) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 
955.368530] env[68217]: DEBUG nova.objects.instance [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lazy-loading 'flavor' on Instance uuid 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.375531] env[68217]: DEBUG nova.network.neutron [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Updating instance_info_cache with network_info: [{"id": "c4ffafa7-b375-4f41-90e8-0db42f248139", "address": "fa:16:3e:50:21:dc", "network": {"id": "b560d1ba-8945-443a-9586-083067363663", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-25035929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0522eaa6ebc48a28651f6b3bf1434f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4ffafa7-b3", "ovs_interfaceid": "c4ffafa7-b375-4f41-90e8-0db42f248139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.415886] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961631, 'name': Rename_Task, 'duration_secs': 0.143433} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.416182] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 955.416480] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-53e1289c-7dcf-4376-a237-37c412181df7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.423977] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 955.423977] env[68217]: value = "task-2961632" [ 955.423977] env[68217]: _type = "Task" [ 955.423977] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.432369] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961632, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.566710] env[68217]: DEBUG oslo_concurrency.lockutils [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "refresh_cache-35817c87-0c55-49bd-917a-59bd39de663c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.566710] env[68217]: DEBUG oslo_concurrency.lockutils [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "refresh_cache-35817c87-0c55-49bd-917a-59bd39de663c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 955.566710] env[68217]: DEBUG nova.network.neutron [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 955.566710] env[68217]: DEBUG nova.objects.instance [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lazy-loading 'info_cache' on Instance uuid 35817c87-0c55-49bd-917a-59bd39de663c {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 955.579522] env[68217]: DEBUG nova.network.neutron [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Updating instance_info_cache with network_info: [{"id": "f42ea21b-6ba7-4a8c-846e-852bfff623e1", "address": "fa:16:3e:94:4b:54", "network": {"id": "6883c570-1947-4515-9f2d-e9ad4510b03c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1933076446", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.151", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "905b15e740ad4f879ba61518ba400680", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf42ea21b-6b", "ovs_interfaceid": "f42ea21b-6ba7-4a8c-846e-852bfff623e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4e400891-4a77-45f0-a314-6963b2ea95f0", "address": "fa:16:3e:53:57:e3", "network": {"id": "8b9ea310-5de2-4f75-9327-786e8592284c", "bridge": 
"br-int", "label": "tempest-ServersTestMultiNic-755846239", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "905b15e740ad4f879ba61518ba400680", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e400891-4a", "ovs_interfaceid": "4e400891-4a77-45f0-a314-6963b2ea95f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c5a3c187-8daf-4f2c-b103-d26c4f4b1792", "address": "fa:16:3e:f4:0a:ed", "network": {"id": "6883c570-1947-4515-9f2d-e9ad4510b03c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1933076446", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.57", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "905b15e740ad4f879ba61518ba400680", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5a3c187-8d", "ovs_interfaceid": "c5a3c187-8daf-4f2c-b103-d26c4f4b1792", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.661080] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e27ac7f-fcdc-48ae-a7ad-9789d8fc7f18 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.670269] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76d1cf01-3e1a-4f8c-a91c-4865c9876d11 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.701854] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abcbdcba-d5aa-49e4-934a-7d889e2e0d37 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.710145] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c2d65b-f9d5-4c90-af6f-c58eeb591f70 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.723659] env[68217]: DEBUG nova.compute.provider_tree [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 
tempest-AttachVolumeNegativeTest-810049963-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.877264] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b6693cf3-3c12-449c-af32-ffd51f3a8b0f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.359s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 955.884168] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Releasing lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 955.941899] env[68217]: DEBUG oslo_vmware.api [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961632, 'name': PowerOnVM_Task, 'duration_secs': 0.464075} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.942205] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 955.942406] env[68217]: INFO nova.compute.manager [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Took 7.70 seconds to spawn the instance on the hypervisor. 
[ 955.942584] env[68217]: DEBUG nova.compute.manager [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 955.944270] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9febc525-a941-43ac-8f64-ddb89fca2cc7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.956916] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "01f97d0d-df21-441c-9dc6-5c51e3798d81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 955.957154] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "01f97d0d-df21-441c-9dc6-5c51e3798d81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.086206] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Releasing lock "refresh_cache-1f99ace3-1c5b-46ce-bb9c-74e139519da7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 956.087047] env[68217]: DEBUG nova.compute.manager [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Instance network_info: |[{"id": "f42ea21b-6ba7-4a8c-846e-852bfff623e1", "address": "fa:16:3e:94:4b:54", "network": {"id": "6883c570-1947-4515-9f2d-e9ad4510b03c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1933076446", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.151", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "905b15e740ad4f879ba61518ba400680", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf42ea21b-6b", "ovs_interfaceid": "f42ea21b-6ba7-4a8c-846e-852bfff623e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4e400891-4a77-45f0-a314-6963b2ea95f0", "address": "fa:16:3e:53:57:e3", "network": {"id": "8b9ea310-5de2-4f75-9327-786e8592284c", "bridge": "br-int", "label": 
"tempest-ServersTestMultiNic-755846239", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "905b15e740ad4f879ba61518ba400680", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e400891-4a", "ovs_interfaceid": "4e400891-4a77-45f0-a314-6963b2ea95f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c5a3c187-8daf-4f2c-b103-d26c4f4b1792", "address": "fa:16:3e:f4:0a:ed", "network": {"id": "6883c570-1947-4515-9f2d-e9ad4510b03c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1933076446", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.57", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "905b15e740ad4f879ba61518ba400680", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5a3c187-8d", "ovs_interfaceid": "c5a3c187-8daf-4f2c-b103-d26c4f4b1792", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 956.087307] env[68217]: DEBUG oslo_concurrency.lockutils [req-24db96a8-05c6-42fa-9149-aaa7cf5f29ed req-3e2e30dd-26eb-4e67-856c-554a27e8c68e service nova] Acquired lock "refresh_cache-1f99ace3-1c5b-46ce-bb9c-74e139519da7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.087521] env[68217]: DEBUG nova.network.neutron [req-24db96a8-05c6-42fa-9149-aaa7cf5f29ed req-3e2e30dd-26eb-4e67-856c-554a27e8c68e service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Refreshing network info cache for port c5a3c187-8daf-4f2c-b103-d26c4f4b1792 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 956.089194] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:94:4b:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '715e3f37-7401-48fb-a0ee-59d340b40de1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f42ea21b-6ba7-4a8c-846e-852bfff623e1', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 
'fa:16:3e:53:57:e3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd69a4b11-8d65-435f-94a5-28f74a39a718', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4e400891-4a77-45f0-a314-6963b2ea95f0', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:0a:ed', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '715e3f37-7401-48fb-a0ee-59d340b40de1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c5a3c187-8daf-4f2c-b103-d26c4f4b1792', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 956.101271] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Creating folder: Project (905b15e740ad4f879ba61518ba400680). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 956.102450] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-da1d9676-022b-46c6-99b5-bf2c9773c44e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.115311] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Created folder: Project (905b15e740ad4f879ba61518ba400680) in parent group-v594094. [ 956.115311] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Creating folder: Instances. Parent ref: group-v594333. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 956.116552] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14e97645-41d0-4ea9-83c7-5a58b6c5674d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.122425] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "105e6181-19c4-466b-88a0-cdbca2cac230" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.122597] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "105e6181-19c4-466b-88a0-cdbca2cac230" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.131395] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Created folder: Instances in parent group-v594333. 
[ 956.131571] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 956.132047] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 956.132047] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a0c6778f-46dd-43c5-ac74-231fc170fd4b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.158688] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 956.158688] env[68217]: value = "task-2961635" [ 956.158688] env[68217]: _type = "Task" [ 956.158688] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.166294] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961635, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.230148] env[68217]: DEBUG nova.scheduler.client.report [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 956.237022] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.237022] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.237022] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.237022] env[68217]: 
DEBUG oslo_concurrency.lockutils [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.237022] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.237022] env[68217]: INFO nova.compute.manager [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Terminating instance [ 956.291438] env[68217]: DEBUG oslo_concurrency.lockutils [None req-11f21e38-d133-43b2-b8d7-e0995879eca5 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.291438] env[68217]: DEBUG oslo_concurrency.lockutils [None req-11f21e38-d133-43b2-b8d7-e0995879eca5 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.291438] env[68217]: DEBUG nova.compute.manager [None req-11f21e38-d133-43b2-b8d7-e0995879eca5 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 956.293100] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8605344-93a8-4abf-8b77-553a4e1e7402 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.300214] env[68217]: DEBUG nova.compute.manager [None req-11f21e38-d133-43b2-b8d7-e0995879eca5 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68217) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 956.300822] env[68217]: DEBUG nova.objects.instance [None req-11f21e38-d133-43b2-b8d7-e0995879eca5 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lazy-loading 'flavor' on Instance uuid 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 956.384395] env[68217]: DEBUG nova.compute.manager 
[req-afde02ec-b921-43af-8fcf-5ba98522392b req-de56eed4-e94b-44f6-8882-1e9b5c31ac10 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Received event network-vif-unplugged-c4ffafa7-b375-4f41-90e8-0db42f248139 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 956.384686] env[68217]: DEBUG oslo_concurrency.lockutils [req-afde02ec-b921-43af-8fcf-5ba98522392b req-de56eed4-e94b-44f6-8882-1e9b5c31ac10 service nova] Acquiring lock "d14026b1-84dd-430e-be94-94dcb1f47473-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.384838] env[68217]: DEBUG oslo_concurrency.lockutils [req-afde02ec-b921-43af-8fcf-5ba98522392b req-de56eed4-e94b-44f6-8882-1e9b5c31ac10 service nova] Lock "d14026b1-84dd-430e-be94-94dcb1f47473-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.385107] env[68217]: DEBUG oslo_concurrency.lockutils [req-afde02ec-b921-43af-8fcf-5ba98522392b req-de56eed4-e94b-44f6-8882-1e9b5c31ac10 service nova] Lock "d14026b1-84dd-430e-be94-94dcb1f47473-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.385403] env[68217]: DEBUG nova.compute.manager [req-afde02ec-b921-43af-8fcf-5ba98522392b req-de56eed4-e94b-44f6-8882-1e9b5c31ac10 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] No waiting events found dispatching network-vif-unplugged-c4ffafa7-b375-4f41-90e8-0db42f248139 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 956.385651] env[68217]: WARNING nova.compute.manager [req-afde02ec-b921-43af-8fcf-5ba98522392b req-de56eed4-e94b-44f6-8882-1e9b5c31ac10 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Received unexpected event network-vif-unplugged-c4ffafa7-b375-4f41-90e8-0db42f248139 for instance with vm_state shelved and task_state shelving_offloading. [ 956.462423] env[68217]: DEBUG nova.compute.manager [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 956.468897] env[68217]: INFO nova.compute.manager [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Took 27.40 seconds to build instance. 
[ 956.532814] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 956.534104] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84beec07-e521-44ef-9b4e-df6254e2ea16 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.541826] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 956.542118] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-de4d3cf2-7ea5-4a9a-97d3-52dc5f944ace {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.628505] env[68217]: DEBUG nova.compute.manager [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 956.631729] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 956.635309] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 956.635309] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Deleting the datastore file [datastore1] d14026b1-84dd-430e-be94-94dcb1f47473 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 956.635309] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-dcd627b4-3213-4720-8018-97d6ef29b948 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.639313] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 956.639313] env[68217]: value = "task-2961637" [ 956.639313] env[68217]: _type = "Task" [ 956.639313] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.650153] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961637, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.674906] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961635, 'name': CreateVM_Task, 'duration_secs': 0.481566} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.678950] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 956.679936] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.680906] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 956.681761] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 956.682666] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e9b6309-570b-43ec-a66f-7b91fdabd0bc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.690148] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 956.690148] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d4daf1-94f8-105c-1588-c18f5dc1f6ed" [ 956.690148] env[68217]: _type = "Task" [ 956.690148] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.705542] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d4daf1-94f8-105c-1588-c18f5dc1f6ed, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.741211] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.407s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.741211] env[68217]: DEBUG nova.compute.manager [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 956.749275] env[68217]: DEBUG oslo_concurrency.lockutils [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.057s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.749827] env[68217]: DEBUG oslo_concurrency.lockutils [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.752294] env[68217]: DEBUG oslo_concurrency.lockutils [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.774s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.752645] env[68217]: DEBUG oslo_concurrency.lockutils [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.755673] env[68217]: DEBUG oslo_concurrency.lockutils [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.656s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.756112] env[68217]: DEBUG oslo_concurrency.lockutils [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.759438] env[68217]: DEBUG 
nova.compute.manager [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 956.759826] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 956.764686] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc63e31-7b65-410a-a26a-323e650d39a7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.777639] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 956.780052] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-15eae5c4-c714-4e66-94a8-844a9ed5b2c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.785770] env[68217]: DEBUG oslo_vmware.api [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 956.785770] env[68217]: value = "task-2961638" [ 956.785770] env[68217]: _type = "Task" [ 956.785770] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.788407] env[68217]: INFO nova.scheduler.client.report [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Deleted allocations for instance fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb [ 956.790843] env[68217]: INFO nova.scheduler.client.report [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Deleted allocations for instance da1524a7-2756-4429-ada2-b1f493544bd2 [ 956.811655] env[68217]: INFO nova.scheduler.client.report [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Deleted allocations for instance 7584180b-efa6-4038-9f3a-619ab7937553 [ 956.820609] env[68217]: DEBUG oslo_vmware.api [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961638, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.969388] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3468db-faaf-4e4a-b1e7-e66a223c1e2a tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "4a555172-a2a3-410b-a0fe-38964cee9a22" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.921s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 956.986772] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 956.987050] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 956.988515] env[68217]: INFO nova.compute.claims [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 957.128878] env[68217]: DEBUG nova.network.neutron [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Updating instance_info_cache with network_info: [{"id": "63b92bd9-8f95-481e-9ef4-468ea20dade1", "address": "fa:16:3e:e8:e5:1d", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63b92bd9-8f", "ovs_interfaceid": "63b92bd9-8f95-481e-9ef4-468ea20dade1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.133938] env[68217]: DEBUG nova.network.neutron [req-24db96a8-05c6-42fa-9149-aaa7cf5f29ed req-3e2e30dd-26eb-4e67-856c-554a27e8c68e service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Updated VIF entry in instance network info cache for 
port c5a3c187-8daf-4f2c-b103-d26c4f4b1792. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 957.134454] env[68217]: DEBUG nova.network.neutron [req-24db96a8-05c6-42fa-9149-aaa7cf5f29ed req-3e2e30dd-26eb-4e67-856c-554a27e8c68e service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Updating instance_info_cache with network_info: [{"id": "f42ea21b-6ba7-4a8c-846e-852bfff623e1", "address": "fa:16:3e:94:4b:54", "network": {"id": "6883c570-1947-4515-9f2d-e9ad4510b03c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1933076446", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.151", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "905b15e740ad4f879ba61518ba400680", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": "nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf42ea21b-6b", "ovs_interfaceid": "f42ea21b-6ba7-4a8c-846e-852bfff623e1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "4e400891-4a77-45f0-a314-6963b2ea95f0", "address": "fa:16:3e:53:57:e3", "network": {"id": "8b9ea310-5de2-4f75-9327-786e8592284c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-755846239", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.107", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "905b15e740ad4f879ba61518ba400680", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4e400891-4a", "ovs_interfaceid": "4e400891-4a77-45f0-a314-6963b2ea95f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "c5a3c187-8daf-4f2c-b103-d26c4f4b1792", "address": "fa:16:3e:f4:0a:ed", "network": {"id": "6883c570-1947-4515-9f2d-e9ad4510b03c", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1933076446", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.57", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "905b15e740ad4f879ba61518ba400680", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "715e3f37-7401-48fb-a0ee-59d340b40de1", "external-id": 
"nsx-vlan-transportzone-739", "segmentation_id": 739, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc5a3c187-8d", "ovs_interfaceid": "c5a3c187-8daf-4f2c-b103-d26c4f4b1792", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.153104] env[68217]: DEBUG oslo_vmware.api [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961637, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180916} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.153411] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 957.153614] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 957.153811] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 957.166871] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.186161] env[68217]: INFO nova.scheduler.client.report [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Deleted allocations for instance d14026b1-84dd-430e-be94-94dcb1f47473 [ 957.203621] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d4daf1-94f8-105c-1588-c18f5dc1f6ed, 'name': SearchDatastore_Task, 'duration_secs': 0.017749} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.203621] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.205798] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 957.205798] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.205798] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 957.205798] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 957.205798] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ae060f70-6e00-4667-8c2f-65b15319651e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.218995] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 957.218995] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 957.218995] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d9df5e0-86db-44a8-8bf3-8ed0dc6ad965 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.225255] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 957.225255] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d566e5-95bf-7c31-e291-1059ce77b5fc" [ 957.225255] env[68217]: _type = "Task" [ 957.225255] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.235756] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d566e5-95bf-7c31-e291-1059ce77b5fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.264918] env[68217]: DEBUG nova.compute.utils [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 957.267428] env[68217]: DEBUG nova.compute.manager [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 957.267754] env[68217]: DEBUG nova.network.neutron [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 957.299813] env[68217]: DEBUG oslo_vmware.api [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961638, 'name': PowerOffVM_Task, 'duration_secs': 0.329358} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.300144] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 957.300399] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 957.300634] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-26b0d4a5-c687-4fb5-8600-6f90391b7a9c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.312614] env[68217]: DEBUG oslo_concurrency.lockutils [None req-df78105d-95ef-43f4-8b73-4e2c5dbc7230 tempest-VolumesAdminNegativeTest-1732763103 tempest-VolumesAdminNegativeTest-1732763103-project-member] Lock "da1524a7-2756-4429-ada2-b1f493544bd2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.898s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.314583] env[68217]: DEBUG oslo_concurrency.lockutils [None req-19531c9b-8076-441b-b643-6568a6b399a4 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 17.145s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.321723] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-11f21e38-d133-43b2-b8d7-e0995879eca5 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 957.324315] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bf73a89f-8ca7-4a84-bf17-e4608f27b4bb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.327084] env[68217]: DEBUG oslo_concurrency.lockutils [None req-aad97ce6-f9a6-4574-baf0-32203621cc73 tempest-ServersWithSpecificFlavorTestJSON-1554550582 tempest-ServersWithSpecificFlavorTestJSON-1554550582-project-member] Lock "7584180b-efa6-4038-9f3a-619ab7937553" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.260s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 957.332930] env[68217]: DEBUG oslo_vmware.api [None req-11f21e38-d133-43b2-b8d7-e0995879eca5 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 957.332930] env[68217]: value = "task-2961640" [ 957.332930] env[68217]: _type = "Task" [ 957.332930] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.342698] env[68217]: DEBUG oslo_vmware.api [None req-11f21e38-d133-43b2-b8d7-e0995879eca5 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961640, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.369249] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 957.369499] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 957.369715] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Deleting the datastore file [datastore2] ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 957.370037] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cc725b3e-0198-43ad-b44b-a89db562bbd8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.375524] env[68217]: DEBUG nova.policy [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd229078579a54e6991e85bc49326c0b6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3068bf39ee943f1bdf378f8b2a5c360', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 957.379128] env[68217]: DEBUG oslo_vmware.api [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 957.379128] env[68217]: value = "task-2961641" [ 957.379128] env[68217]: _type = "Task" [ 957.379128] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.386956] env[68217]: DEBUG oslo_vmware.api [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961641, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.631742] env[68217]: DEBUG oslo_concurrency.lockutils [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "refresh_cache-35817c87-0c55-49bd-917a-59bd39de663c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.631742] env[68217]: DEBUG nova.objects.instance [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lazy-loading 'migration_context' on Instance uuid 35817c87-0c55-49bd-917a-59bd39de663c {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 957.639621] env[68217]: DEBUG oslo_concurrency.lockutils [req-24db96a8-05c6-42fa-9149-aaa7cf5f29ed req-3e2e30dd-26eb-4e67-856c-554a27e8c68e service nova] Releasing lock "refresh_cache-1f99ace3-1c5b-46ce-bb9c-74e139519da7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 957.692192] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 957.736172] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d566e5-95bf-7c31-e291-1059ce77b5fc, 'name': SearchDatastore_Task, 'duration_secs': 0.019319} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.737112] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9c48961f-ff19-4d48-a443-b67f334d343e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.749368] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 957.749368] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d49e48-fc58-3a1b-0116-855bf76cb75b" [ 957.749368] env[68217]: _type = "Task" [ 957.749368] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.758493] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d49e48-fc58-3a1b-0116-855bf76cb75b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.767536] env[68217]: DEBUG nova.compute.manager [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 957.845928] env[68217]: DEBUG oslo_vmware.api [None req-11f21e38-d133-43b2-b8d7-e0995879eca5 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961640, 'name': PowerOffVM_Task, 'duration_secs': 0.217068} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.846476] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-11f21e38-d133-43b2-b8d7-e0995879eca5 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 957.846693] env[68217]: DEBUG nova.compute.manager [None req-11f21e38-d133-43b2-b8d7-e0995879eca5 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 957.847461] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4f92fb-31a8-43f5-8834-79bff7e447fa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.890010] env[68217]: DEBUG oslo_vmware.api [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961641, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.307064} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.890693] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 957.890919] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 957.891114] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 957.891306] env[68217]: INFO nova.compute.manager [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Took 1.13 seconds to destroy the instance on the hypervisor. [ 957.891552] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 957.892736] env[68217]: DEBUG nova.compute.manager [-] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 957.892843] env[68217]: DEBUG nova.network.neutron [-] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 958.137504] env[68217]: DEBUG nova.objects.base [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Object Instance<35817c87-0c55-49bd-917a-59bd39de663c> lazy-loaded attributes: info_cache,migration_context {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 958.138693] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc0b4fa2-310d-4e7c-8ee4-8995ecebb996 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.169224] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13d0bfd6-5403-4c3a-b733-88d2040cc1de {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.175319] env[68217]: DEBUG oslo_vmware.api [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 958.175319] env[68217]: value = 
"session[524e8b0f-828c-0303-9685-d9311f1dba7a]52da564f-ab50-9981-6942-a08c382c5944" [ 958.175319] env[68217]: _type = "Task" [ 958.175319] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.184029] env[68217]: DEBUG oslo_vmware.api [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52da564f-ab50-9981-6942-a08c382c5944, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.251789] env[68217]: DEBUG nova.compute.manager [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 958.252873] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f79c59-4ded-4283-9fe3-a7ce8d829124 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.272805] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d49e48-fc58-3a1b-0116-855bf76cb75b, 'name': SearchDatastore_Task, 'duration_secs': 0.02262} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.280125] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 958.280125] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 1f99ace3-1c5b-46ce-bb9c-74e139519da7/1f99ace3-1c5b-46ce-bb9c-74e139519da7.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 958.280865] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca8c9be9-e0b1-4bf2-a191-162b5df8510e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.290435] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 958.290435] env[68217]: value = "task-2961642" [ 958.290435] env[68217]: _type = "Task" [ 958.290435] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.298684] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961642, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.305018] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-334b08c3-70e8-4238-9108-4c7fb1cd071c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.313109] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac32c5e-d371-433f-8b30-0189369051eb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.352497] env[68217]: DEBUG nova.network.neutron [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Successfully created port: c9ba3459-1c52-46fa-b8b7-7f41a840a334 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 958.355238] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5049e98f-0353-4192-ac04-96a4f5908e07 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.363483] env[68217]: DEBUG oslo_concurrency.lockutils [None req-11f21e38-d133-43b2-b8d7-e0995879eca5 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.072s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 958.368705] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-082378b5-c52a-4136-ae30-fb349aab2d5e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.384640] env[68217]: DEBUG nova.compute.provider_tree [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.687686] env[68217]: DEBUG oslo_vmware.api [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52da564f-ab50-9981-6942-a08c382c5944, 'name': SearchDatastore_Task, 'duration_secs': 0.013065} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.688103] env[68217]: DEBUG oslo_concurrency.lockutils [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 958.782457] env[68217]: INFO nova.compute.manager [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] instance snapshotting [ 958.786260] env[68217]: DEBUG nova.compute.manager [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 958.790889] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5146b18-ae7a-4bc6-a4ef-5c9d5d9cabd2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.837730] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c476513b-e5d0-4b7a-b5b4-4fc75b301700 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.841064] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961642, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.852766] env[68217]: DEBUG nova.virt.hardware [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 958.853140] env[68217]: DEBUG nova.virt.hardware [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 958.853399] env[68217]: DEBUG nova.virt.hardware [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 958.853703] env[68217]: DEBUG nova.virt.hardware [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 958.853958] env[68217]: DEBUG nova.virt.hardware [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 958.854248] env[68217]: DEBUG nova.virt.hardware [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 958.854571] env[68217]: DEBUG nova.virt.hardware [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 958.854827] env[68217]: DEBUG nova.virt.hardware [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 958.855110] env[68217]: 
DEBUG nova.virt.hardware [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 958.855390] env[68217]: DEBUG nova.virt.hardware [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 958.855669] env[68217]: DEBUG nova.virt.hardware [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 958.856893] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff95f5f-97a5-476e-b615-2c4b32f51b3a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.862243] env[68217]: DEBUG nova.network.neutron [-] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.870049] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1f12070-0e07-45d5-9259-efc3047bd85c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.892114] env[68217]: DEBUG nova.scheduler.client.report [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 959.178886] env[68217]: DEBUG nova.compute.manager [req-179e9990-8479-499f-8b39-622c5ff5b5fd req-27462269-7ffa-4bf0-b199-6e3eb7918a1f service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Received event network-changed-c4ffafa7-b375-4f41-90e8-0db42f248139 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 959.179084] env[68217]: DEBUG nova.compute.manager [req-179e9990-8479-499f-8b39-622c5ff5b5fd req-27462269-7ffa-4bf0-b199-6e3eb7918a1f service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Refreshing instance network info cache due to event network-changed-c4ffafa7-b375-4f41-90e8-0db42f248139. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 959.179310] env[68217]: DEBUG oslo_concurrency.lockutils [req-179e9990-8479-499f-8b39-622c5ff5b5fd req-27462269-7ffa-4bf0-b199-6e3eb7918a1f service nova] Acquiring lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 959.179460] env[68217]: DEBUG oslo_concurrency.lockutils [req-179e9990-8479-499f-8b39-622c5ff5b5fd req-27462269-7ffa-4bf0-b199-6e3eb7918a1f service nova] Acquired lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 959.179627] env[68217]: DEBUG nova.network.neutron [req-179e9990-8479-499f-8b39-622c5ff5b5fd req-27462269-7ffa-4bf0-b199-6e3eb7918a1f service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Refreshing network info cache for port c4ffafa7-b375-4f41-90e8-0db42f248139 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 959.306042] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961642, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.564427} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.309272] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 1f99ace3-1c5b-46ce-bb9c-74e139519da7/1f99ace3-1c5b-46ce-bb9c-74e139519da7.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 959.311021] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 959.311021] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d68c1b38-f66f-417b-8b09-54feddd6ae40 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.318275] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 959.318275] env[68217]: value = "task-2961643" [ 959.318275] env[68217]: _type = "Task" [ 959.318275] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.328886] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961643, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.351438] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 959.351795] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6c5a33c3-f836-42eb-82ce-8f0c675a8cab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.359913] env[68217]: DEBUG oslo_vmware.api [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 959.359913] env[68217]: value = "task-2961644" [ 959.359913] env[68217]: _type = "Task" [ 959.359913] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.369797] env[68217]: INFO nova.compute.manager [-] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Took 1.48 seconds to deallocate network for instance. [ 959.370237] env[68217]: DEBUG oslo_vmware.api [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961644, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.398809] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.412s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 959.399373] env[68217]: DEBUG nova.compute.manager [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 959.402372] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.235s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.404569] env[68217]: INFO nova.compute.claims [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 959.835291] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961643, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066695} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.835615] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 959.836506] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-822547ac-118c-4792-a90f-9b96fc2d8c5a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.869849] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Reconfiguring VM instance instance-00000055 to attach disk [datastore1] 1f99ace3-1c5b-46ce-bb9c-74e139519da7/1f99ace3-1c5b-46ce-bb9c-74e139519da7.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 959.877088] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-414061eb-e0df-4f0f-a49e-c8d97e0ac3a3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.899137] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.903716] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 959.903891] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 959.911164] env[68217]: DEBUG nova.compute.utils [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 959.914469] env[68217]: DEBUG oslo_vmware.api [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961644, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.917554] env[68217]: DEBUG nova.compute.manager [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 959.917736] env[68217]: DEBUG nova.network.neutron [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 959.920610] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 959.920610] env[68217]: value = "task-2961645" [ 959.920610] env[68217]: _type = "Task" [ 959.920610] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.932825] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961645, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.002163] env[68217]: DEBUG nova.policy [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '554b6b3d22404c0ba52c739b3c7b98a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8fb62d18446841a3b2a6ac25ab5dc869', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 960.095851] env[68217]: DEBUG oslo_concurrency.lockutils [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "bd62c682-24f2-4559-887a-03186409f699" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.096256] env[68217]: DEBUG oslo_concurrency.lockutils [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "bd62c682-24f2-4559-887a-03186409f699" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.096597] env[68217]: DEBUG oslo_concurrency.lockutils [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "bd62c682-24f2-4559-887a-03186409f699-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.096891] env[68217]: DEBUG oslo_concurrency.lockutils [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "bd62c682-24f2-4559-887a-03186409f699-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 960.097038] env[68217]: DEBUG oslo_concurrency.lockutils [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "bd62c682-24f2-4559-887a-03186409f699-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 960.099182] env[68217]: INFO nova.compute.manager [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Terminating instance [ 960.133988] env[68217]: DEBUG nova.network.neutron [req-179e9990-8479-499f-8b39-622c5ff5b5fd req-27462269-7ffa-4bf0-b199-6e3eb7918a1f service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Updated VIF entry in instance network info cache for port c4ffafa7-b375-4f41-90e8-0db42f248139. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 960.134208] env[68217]: DEBUG nova.network.neutron [req-179e9990-8479-499f-8b39-622c5ff5b5fd req-27462269-7ffa-4bf0-b199-6e3eb7918a1f service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Updating instance_info_cache with network_info: [{"id": "c4ffafa7-b375-4f41-90e8-0db42f248139", "address": "fa:16:3e:50:21:dc", "network": {"id": "b560d1ba-8945-443a-9586-083067363663", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-25035929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0522eaa6ebc48a28651f6b3bf1434f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapc4ffafa7-b3", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 960.375954] env[68217]: DEBUG oslo_vmware.api [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961644, 'name': CreateSnapshot_Task, 'duration_secs': 0.823559} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.375954] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 960.379244] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a759360-cc5d-41cd-882e-58d8ceb5ad1a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.398538] env[68217]: DEBUG nova.objects.instance [None req-3643a3e3-f438-42da-b02a-893eb96c1353 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lazy-loading 'flavor' on Instance uuid 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 960.407993] env[68217]: DEBUG nova.compute.manager [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 960.423160] env[68217]: DEBUG nova.compute.manager [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 960.439582] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961645, 'name': ReconfigVM_Task, 'duration_secs': 0.392598} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.439888] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Reconfigured VM instance instance-00000055 to attach disk [datastore1] 1f99ace3-1c5b-46ce-bb9c-74e139519da7/1f99ace3-1c5b-46ce-bb9c-74e139519da7.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 960.442229] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-249a3b0b-0bb8-41e9-9948-3634f01079b9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.450540] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 960.450540] env[68217]: value = "task-2961646" [ 960.450540] env[68217]: _type = "Task" [ 960.450540] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.459738] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961646, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.476600] env[68217]: DEBUG nova.network.neutron [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Successfully created port: 87cee679-c308-4c4f-b1b9-3f6cc809233e {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 960.607313] env[68217]: DEBUG nova.compute.manager [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 960.608651] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 960.609162] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e79e412-4ac4-43a5-b406-49cb916b3603 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.619352] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 960.619632] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1bc65a21-830c-42ca-af68-5b34f6851b03 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.632299] env[68217]: DEBUG oslo_vmware.api [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 960.632299] env[68217]: value = "task-2961647" [ 960.632299] env[68217]: _type = "Task" [ 960.632299] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.637516] env[68217]: DEBUG oslo_concurrency.lockutils [req-179e9990-8479-499f-8b39-622c5ff5b5fd req-27462269-7ffa-4bf0-b199-6e3eb7918a1f service nova] Releasing lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 960.637516] env[68217]: DEBUG nova.compute.manager [req-179e9990-8479-499f-8b39-622c5ff5b5fd req-27462269-7ffa-4bf0-b199-6e3eb7918a1f service nova] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Received event network-vif-deleted-b1fdb191-647f-40ba-a7b4-d58ba0a6a2d4 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 960.647877] env[68217]: DEBUG oslo_vmware.api [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961647, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.735132] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b512e57b-dff0-432c-ba17-e652b2fcf2cb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.745696] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1821bb75-d303-4e7d-8cc5-833d43bc5a2e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.793583] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29d4e940-56e1-4989-ab7c-5f0bdf7c88d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.802680] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22ebda95-24d1-4a27-9ed9-fd87ad1463aa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.820995] env[68217]: DEBUG nova.compute.provider_tree [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 960.897336] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 960.898102] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6d80c1af-6610-42e6-8973-ada9519a1a4d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.903871] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3643a3e3-f438-42da-b02a-893eb96c1353 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "refresh_cache-95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" {{(pid=68217) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.904054] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3643a3e3-f438-42da-b02a-893eb96c1353 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquired lock "refresh_cache-95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 960.904219] env[68217]: DEBUG nova.network.neutron [None req-3643a3e3-f438-42da-b02a-893eb96c1353 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 960.904392] env[68217]: DEBUG nova.objects.instance [None req-3643a3e3-f438-42da-b02a-893eb96c1353 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lazy-loading 'info_cache' on Instance uuid 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 960.909121] env[68217]: DEBUG oslo_vmware.api [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 960.909121] env[68217]: value = "task-2961648" [ 960.909121] env[68217]: _type = "Task" [ 960.909121] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.921597] env[68217]: DEBUG oslo_vmware.api [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961648, 'name': CloneVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.936448] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 960.961345] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961646, 'name': Rename_Task, 'duration_secs': 0.174099} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.963371] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 960.963371] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a4f51054-8b51-4e29-9b99-e1f32658a412 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.968759] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 960.968759] env[68217]: value = "task-2961649" [ 960.968759] env[68217]: _type = "Task" [ 960.968759] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.975952] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961649, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.999783] env[68217]: DEBUG nova.network.neutron [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Successfully updated port: c9ba3459-1c52-46fa-b8b7-7f41a840a334 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 961.136021] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "d14026b1-84dd-430e-be94-94dcb1f47473" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.145972] env[68217]: DEBUG oslo_vmware.api [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961647, 'name': PowerOffVM_Task, 'duration_secs': 0.242144} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.145972] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 961.145972] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 961.145972] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c303e0f1-3431-4e33-99ab-d638a90f1b76 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.239296] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 961.239296] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 961.239296] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Deleting the datastore file [datastore2] bd62c682-24f2-4559-887a-03186409f699 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 961.239296] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f7f1f6be-b91d-43c3-a19d-6c21b54e3bc6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.245761] env[68217]: DEBUG oslo_vmware.api [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for the task: (returnval){ [ 961.245761] env[68217]: value = "task-2961651" [ 961.245761] env[68217]: _type = "Task" [ 961.245761] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.257828] env[68217]: DEBUG oslo_vmware.api [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961651, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.326845] env[68217]: DEBUG nova.scheduler.client.report [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 961.341356] env[68217]: DEBUG nova.compute.manager [req-980fccac-a1d8-4f4a-95be-8b271acc352b req-f83cf117-817b-4fc9-9a5c-6306e779879d service nova] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Received event network-vif-plugged-c9ba3459-1c52-46fa-b8b7-7f41a840a334 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 961.341611] env[68217]: DEBUG oslo_concurrency.lockutils [req-980fccac-a1d8-4f4a-95be-8b271acc352b req-f83cf117-817b-4fc9-9a5c-6306e779879d service nova] Acquiring lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 961.341771] env[68217]: DEBUG oslo_concurrency.lockutils [req-980fccac-a1d8-4f4a-95be-8b271acc352b req-f83cf117-817b-4fc9-9a5c-6306e779879d service nova] Lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.341938] env[68217]: DEBUG oslo_concurrency.lockutils [req-980fccac-a1d8-4f4a-95be-8b271acc352b req-f83cf117-817b-4fc9-9a5c-6306e779879d service nova] Lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.345610] env[68217]: DEBUG nova.compute.manager [req-980fccac-a1d8-4f4a-95be-8b271acc352b req-f83cf117-817b-4fc9-9a5c-6306e779879d service nova] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] No waiting events found dispatching network-vif-plugged-c9ba3459-1c52-46fa-b8b7-7f41a840a334 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 961.345610] env[68217]: WARNING nova.compute.manager [req-980fccac-a1d8-4f4a-95be-8b271acc352b req-f83cf117-817b-4fc9-9a5c-6306e779879d service nova] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Received unexpected event network-vif-plugged-c9ba3459-1c52-46fa-b8b7-7f41a840a334 for instance with vm_state building and task_state spawning. 
[ 961.345610] env[68217]: DEBUG nova.compute.manager [req-980fccac-a1d8-4f4a-95be-8b271acc352b req-f83cf117-817b-4fc9-9a5c-6306e779879d service nova] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Received event network-changed-c9ba3459-1c52-46fa-b8b7-7f41a840a334 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 961.345610] env[68217]: DEBUG nova.compute.manager [req-980fccac-a1d8-4f4a-95be-8b271acc352b req-f83cf117-817b-4fc9-9a5c-6306e779879d service nova] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Refreshing instance network info cache due to event network-changed-c9ba3459-1c52-46fa-b8b7-7f41a840a334. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 961.345610] env[68217]: DEBUG oslo_concurrency.lockutils [req-980fccac-a1d8-4f4a-95be-8b271acc352b req-f83cf117-817b-4fc9-9a5c-6306e779879d service nova] Acquiring lock "refresh_cache-776798bf-1ad4-4acb-ac58-cacc5493e1c7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.345610] env[68217]: DEBUG oslo_concurrency.lockutils [req-980fccac-a1d8-4f4a-95be-8b271acc352b req-f83cf117-817b-4fc9-9a5c-6306e779879d service nova] Acquired lock "refresh_cache-776798bf-1ad4-4acb-ac58-cacc5493e1c7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 961.345610] env[68217]: DEBUG nova.network.neutron [req-980fccac-a1d8-4f4a-95be-8b271acc352b req-f83cf117-817b-4fc9-9a5c-6306e779879d service nova] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Refreshing network info cache for port c9ba3459-1c52-46fa-b8b7-7f41a840a334 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 961.408568] env[68217]: DEBUG nova.objects.base [None req-3643a3e3-f438-42da-b02a-893eb96c1353 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Object Instance<95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f> lazy-loaded attributes: flavor,info_cache {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 961.422752] env[68217]: DEBUG oslo_vmware.api [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961648, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.435280] env[68217]: DEBUG nova.compute.manager [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 961.465675] env[68217]: DEBUG nova.virt.hardware [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 961.465937] env[68217]: DEBUG nova.virt.hardware [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 961.466511] env[68217]: DEBUG nova.virt.hardware [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 961.466750] env[68217]: DEBUG nova.virt.hardware [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 961.466904] env[68217]: DEBUG nova.virt.hardware [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 961.467062] env[68217]: DEBUG nova.virt.hardware [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 961.467279] env[68217]: DEBUG nova.virt.hardware [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 961.467440] env[68217]: DEBUG nova.virt.hardware [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 961.467605] env[68217]: DEBUG nova.virt.hardware [None 
req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 961.467768] env[68217]: DEBUG nova.virt.hardware [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 961.467928] env[68217]: DEBUG nova.virt.hardware [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 961.468801] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e63e0a-65fb-4675-9ef0-c1c326ad68d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.482309] env[68217]: DEBUG oslo_vmware.api [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961649, 'name': PowerOnVM_Task, 'duration_secs': 0.489362} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.484502] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 961.484730] env[68217]: INFO nova.compute.manager [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Took 15.79 seconds to spawn the instance on the hypervisor. 
[ 961.484917] env[68217]: DEBUG nova.compute.manager [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 961.485765] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8904def3-4f6a-481a-8635-aa7bb2702fd7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.490527] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58db28a5-f259-442c-9cd0-868e7f419632 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.512015] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "refresh_cache-776798bf-1ad4-4acb-ac58-cacc5493e1c7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 961.759337] env[68217]: DEBUG oslo_vmware.api [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Task: {'id': task-2961651, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19621} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.759594] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 961.759784] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 961.759961] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 961.760160] env[68217]: INFO nova.compute.manager [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [instance: bd62c682-24f2-4559-887a-03186409f699] Took 1.15 seconds to destroy the instance on the hypervisor. [ 961.760409] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 961.760614] env[68217]: DEBUG nova.compute.manager [-] [instance: bd62c682-24f2-4559-887a-03186409f699] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 961.760709] env[68217]: DEBUG nova.network.neutron [-] [instance: bd62c682-24f2-4559-887a-03186409f699] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 961.832212] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.430s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 961.832782] env[68217]: DEBUG nova.compute.manager [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 961.835716] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.144s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 961.836527] env[68217]: DEBUG nova.objects.instance [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lazy-loading 'resources' on Instance uuid d14026b1-84dd-430e-be94-94dcb1f47473 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 961.922704] env[68217]: DEBUG oslo_vmware.api [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961648, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.024777] env[68217]: INFO nova.compute.manager [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Took 34.82 seconds to build instance. [ 962.066133] env[68217]: DEBUG nova.network.neutron [req-980fccac-a1d8-4f4a-95be-8b271acc352b req-f83cf117-817b-4fc9-9a5c-6306e779879d service nova] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 962.320602] env[68217]: DEBUG nova.network.neutron [req-980fccac-a1d8-4f4a-95be-8b271acc352b req-f83cf117-817b-4fc9-9a5c-6306e779879d service nova] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.343953] env[68217]: DEBUG nova.compute.utils [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 962.345486] env[68217]: DEBUG nova.objects.instance [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lazy-loading 'numa_topology' on Instance uuid d14026b1-84dd-430e-be94-94dcb1f47473 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 962.347659] env[68217]: DEBUG nova.compute.manager [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 962.347889] env[68217]: DEBUG nova.network.neutron [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 962.421939] env[68217]: DEBUG oslo_vmware.api [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961648, 'name': CloneVM_Task} progress is 94%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.523884] env[68217]: DEBUG nova.policy [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fd08981ea724019826d597a1c8b4ecd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d6233e9874c41329f81c990f8bc72b1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 962.528085] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1b02b6e3-6662-4337-871a-603f7be6e705 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "1f99ace3-1c5b-46ce-bb9c-74e139519da7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 36.338s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.587714] env[68217]: DEBUG nova.network.neutron [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Successfully updated port: 87cee679-c308-4c4f-b1b9-3f6cc809233e {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 962.827488] env[68217]: DEBUG oslo_concurrency.lockutils [req-980fccac-a1d8-4f4a-95be-8b271acc352b req-f83cf117-817b-4fc9-9a5c-6306e779879d service nova] Releasing lock "refresh_cache-776798bf-1ad4-4acb-ac58-cacc5493e1c7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 962.827488] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquired lock "refresh_cache-776798bf-1ad4-4acb-ac58-cacc5493e1c7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 962.827488] env[68217]: DEBUG nova.network.neutron [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 962.848202] env[68217]: DEBUG nova.objects.base [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 962.851286] env[68217]: DEBUG nova.compute.manager [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 962.868867] env[68217]: DEBUG nova.network.neutron [None req-3643a3e3-f438-42da-b02a-893eb96c1353 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Updating instance_info_cache with network_info: [{"id": "86568bc3-8f1e-4880-9a22-48003fc7babd", "address": "fa:16:3e:92:6c:cf", "network": {"id": "0e4ebc2e-6e2d-4414-a560-9db08d15dabf", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1709597117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "904fd1b1eb9d4ab8bd1ea9967249bc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86568bc3-8f", "ovs_interfaceid": "86568bc3-8f1e-4880-9a22-48003fc7babd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.926222] env[68217]: DEBUG oslo_vmware.api [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961648, 'name': CloneVM_Task, 'duration_secs': 1.897926} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.926507] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Created linked-clone VM from snapshot [ 962.927840] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8492b02-4351-4a7f-b481-15056010f231 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.936317] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Uploading image 6a8aa891-6c54-45d2-aa56-3de1711292a9 {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 962.955019] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 962.956029] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-5b1afaf3-7a64-4fd2-aa3a-cc890ce7124c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.960058] env[68217]: DEBUG nova.compute.manager [req-040d5dab-9e92-4a4d-a79a-c1089077d1b0 req-5a555a97-ab2b-4e9f-98b0-8855985bd7b2 service nova] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Received event network-vif-plugged-87cee679-c308-4c4f-b1b9-3f6cc809233e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 962.960301] env[68217]: DEBUG oslo_concurrency.lockutils [req-040d5dab-9e92-4a4d-a79a-c1089077d1b0 req-5a555a97-ab2b-4e9f-98b0-8855985bd7b2 service nova] Acquiring lock "01f97d0d-df21-441c-9dc6-5c51e3798d81-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 962.960541] env[68217]: DEBUG oslo_concurrency.lockutils [req-040d5dab-9e92-4a4d-a79a-c1089077d1b0 req-5a555a97-ab2b-4e9f-98b0-8855985bd7b2 service nova] Lock "01f97d0d-df21-441c-9dc6-5c51e3798d81-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 962.961737] env[68217]: DEBUG oslo_concurrency.lockutils [req-040d5dab-9e92-4a4d-a79a-c1089077d1b0 req-5a555a97-ab2b-4e9f-98b0-8855985bd7b2 service nova] Lock "01f97d0d-df21-441c-9dc6-5c51e3798d81-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 962.961737] env[68217]: DEBUG nova.compute.manager [req-040d5dab-9e92-4a4d-a79a-c1089077d1b0 req-5a555a97-ab2b-4e9f-98b0-8855985bd7b2 service nova] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] No waiting events found dispatching network-vif-plugged-87cee679-c308-4c4f-b1b9-3f6cc809233e {{(pid=68217) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:322}} [ 962.961737] env[68217]: WARNING nova.compute.manager [req-040d5dab-9e92-4a4d-a79a-c1089077d1b0 req-5a555a97-ab2b-4e9f-98b0-8855985bd7b2 service nova] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Received unexpected event network-vif-plugged-87cee679-c308-4c4f-b1b9-3f6cc809233e for instance with vm_state building and task_state spawning. [ 962.963421] env[68217]: DEBUG oslo_vmware.api [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 962.963421] env[68217]: value = "task-2961652" [ 962.963421] env[68217]: _type = "Task" [ 962.963421] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.973795] env[68217]: DEBUG oslo_vmware.api [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961652, 'name': Destroy_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.091396] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "refresh_cache-01f97d0d-df21-441c-9dc6-5c51e3798d81" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 963.091550] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "refresh_cache-01f97d0d-df21-441c-9dc6-5c51e3798d81" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 963.091706] env[68217]: DEBUG nova.network.neutron [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 963.156047] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0418587-9f05-45a7-bd2b-53fc4c2755a7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.164753] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b330031f-c740-4537-b67f-390892af94ca {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.203032] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c371b39d-f8d4-4b81-b1d0-e41d68f677f1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.211286] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed8b6e1-fcdf-4695-ba0a-09e0f48034b2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.225725] env[68217]: DEBUG nova.compute.provider_tree [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 
tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.282073] env[68217]: DEBUG nova.network.neutron [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Successfully created port: 1b0e0705-6a81-45e3-b5b0-832547fed562 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 963.345301] env[68217]: DEBUG nova.network.neutron [-] [instance: bd62c682-24f2-4559-887a-03186409f699] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.371299] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3643a3e3-f438-42da-b02a-893eb96c1353 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Releasing lock "refresh_cache-95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 963.378521] env[68217]: DEBUG nova.network.neutron [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 963.455027] env[68217]: DEBUG nova.compute.manager [req-b8d2389d-d712-4397-8281-d83fcd57c861 req-974266f0-01db-492d-81f4-feb738b22709 service nova] [instance: bd62c682-24f2-4559-887a-03186409f699] Received event network-vif-deleted-d1428ec3-01c4-4a36-9a5b-dba91c81f279 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 963.474054] env[68217]: DEBUG oslo_vmware.api [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961652, 'name': Destroy_Task, 'duration_secs': 0.337267} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.474365] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Destroyed the VM [ 963.474602] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 963.474853] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-292691d5-b5bf-4fb6-be17-6c26e83b1a64 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.480963] env[68217]: DEBUG oslo_vmware.api [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 963.480963] env[68217]: value = "task-2961653" [ 963.480963] env[68217]: _type = "Task" [ 963.480963] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 963.489323] env[68217]: DEBUG oslo_vmware.api [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961653, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.545770] env[68217]: DEBUG oslo_concurrency.lockutils [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquiring lock "1f99ace3-1c5b-46ce-bb9c-74e139519da7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.547457] env[68217]: DEBUG oslo_concurrency.lockutils [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "1f99ace3-1c5b-46ce-bb9c-74e139519da7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.547457] env[68217]: DEBUG oslo_concurrency.lockutils [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquiring lock "1f99ace3-1c5b-46ce-bb9c-74e139519da7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 963.547457] env[68217]: DEBUG oslo_concurrency.lockutils [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "1f99ace3-1c5b-46ce-bb9c-74e139519da7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 963.547457] env[68217]: DEBUG oslo_concurrency.lockutils [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "1f99ace3-1c5b-46ce-bb9c-74e139519da7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 963.549589] env[68217]: INFO nova.compute.manager [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Terminating instance [ 963.697650] env[68217]: DEBUG nova.network.neutron [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 963.728650] env[68217]: DEBUG nova.scheduler.client.report [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 963.831507] env[68217]: DEBUG nova.network.neutron [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Updating instance_info_cache with network_info: [{"id": "c9ba3459-1c52-46fa-b8b7-7f41a840a334", "address": "fa:16:3e:a7:8a:0e", "network": {"id": "e7f56c12-ca87-40ad-b72d-955989c48237", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-417650994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3068bf39ee943f1bdf378f8b2a5c360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9ba3459-1c", "ovs_interfaceid": "c9ba3459-1c52-46fa-b8b7-7f41a840a334", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 963.849432] env[68217]: 
INFO nova.compute.manager [-] [instance: bd62c682-24f2-4559-887a-03186409f699] Took 2.09 seconds to deallocate network for instance. [ 963.865263] env[68217]: DEBUG nova.compute.manager [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 963.906878] env[68217]: DEBUG nova.virt.hardware [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 963.907224] env[68217]: DEBUG nova.virt.hardware [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 963.907318] env[68217]: DEBUG nova.virt.hardware [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 963.907515] env[68217]: DEBUG nova.virt.hardware [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 963.907671] env[68217]: DEBUG nova.virt.hardware [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 963.908862] env[68217]: DEBUG nova.virt.hardware [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 963.908862] env[68217]: DEBUG nova.virt.hardware [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 963.908862] env[68217]: DEBUG nova.virt.hardware [None 
req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 963.908862] env[68217]: DEBUG nova.virt.hardware [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 963.908862] env[68217]: DEBUG nova.virt.hardware [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 963.908862] env[68217]: DEBUG nova.virt.hardware [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 963.911595] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7df18a0d-349b-4cac-9538-8da7162b8e38 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.924499] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9dc184e-2cb9-479c-bc6c-b7aad648077c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.992278] env[68217]: DEBUG oslo_vmware.api [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961653, 'name': RemoveSnapshot_Task, 'duration_secs': 0.484856} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.992278] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 964.023958] env[68217]: DEBUG nova.network.neutron [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Updating instance_info_cache with network_info: [{"id": "87cee679-c308-4c4f-b1b9-3f6cc809233e", "address": "fa:16:3e:f4:1f:d6", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87cee679-c3", "ovs_interfaceid": "87cee679-c308-4c4f-b1b9-3f6cc809233e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.053824] env[68217]: DEBUG nova.compute.manager [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 964.053944] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 964.054940] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a088d280-53b8-47b6-ae94-2ea634921edd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.064358] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 964.064358] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-62db84b8-bf25-4492-8e76-1e869a4c7702 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.072888] env[68217]: DEBUG oslo_vmware.api [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 964.072888] env[68217]: value = "task-2961654" [ 964.072888] env[68217]: _type = "Task" [ 964.072888] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.079918] env[68217]: DEBUG oslo_vmware.api [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961654, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.233936] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.398s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.237257] env[68217]: DEBUG oslo_concurrency.lockutils [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 5.549s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.334655] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Releasing lock "refresh_cache-776798bf-1ad4-4acb-ac58-cacc5493e1c7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.335020] env[68217]: DEBUG nova.compute.manager [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Instance network_info: |[{"id": "c9ba3459-1c52-46fa-b8b7-7f41a840a334", "address": "fa:16:3e:a7:8a:0e", "network": {"id": "e7f56c12-ca87-40ad-b72d-955989c48237", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-417650994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3068bf39ee943f1bdf378f8b2a5c360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9ba3459-1c", "ovs_interfaceid": "c9ba3459-1c52-46fa-b8b7-7f41a840a334", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 964.335594] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:8a:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c9ba3459-1c52-46fa-b8b7-7f41a840a334', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 964.345276] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Creating folder: Project (b3068bf39ee943f1bdf378f8b2a5c360). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 964.345654] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-862b08a8-8290-4d32-b5ef-31481b8a1a0d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.356521] env[68217]: DEBUG oslo_concurrency.lockutils [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.360726] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Created folder: Project (b3068bf39ee943f1bdf378f8b2a5c360) in parent group-v594094. [ 964.360915] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Creating folder: Instances. Parent ref: group-v594338. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 964.361407] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e17d0717-4800-4567-a2a9-413bb3c6f609 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.372112] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Created folder: Instances in parent group-v594338. [ 964.372369] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 964.372726] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 964.372792] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c96ec75-6b45-41fc-8806-db276a4164c5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.392181] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3643a3e3-f438-42da-b02a-893eb96c1353 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 964.392314] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cd55fb74-af63-4fbe-b486-d74bb2b73ac3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.398657] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 964.398657] env[68217]: value = "task-2961657" [ 964.398657] env[68217]: _type = "Task" [ 964.398657] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.400477] env[68217]: DEBUG oslo_vmware.api [None req-3643a3e3-f438-42da-b02a-893eb96c1353 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 964.400477] env[68217]: value = "task-2961658" [ 964.400477] env[68217]: _type = "Task" [ 964.400477] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.414510] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961657, 'name': CreateVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.418411] env[68217]: DEBUG oslo_vmware.api [None req-3643a3e3-f438-42da-b02a-893eb96c1353 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961658, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.496548] env[68217]: WARNING nova.compute.manager [None req-c91574fd-c925-4fb8-aa1a-01eb7db85125 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Image not found during snapshot: nova.exception.ImageNotFound: Image 6a8aa891-6c54-45d2-aa56-3de1711292a9 could not be found. 
[ 964.526304] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "refresh_cache-01f97d0d-df21-441c-9dc6-5c51e3798d81" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 964.526868] env[68217]: DEBUG nova.compute.manager [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Instance network_info: |[{"id": "87cee679-c308-4c4f-b1b9-3f6cc809233e", "address": "fa:16:3e:f4:1f:d6", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87cee679-c3", "ovs_interfaceid": "87cee679-c308-4c4f-b1b9-3f6cc809233e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 964.527489] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:1f:d6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '02bbcead-d833-4543-bec6-fb82dfe659ff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '87cee679-c308-4c4f-b1b9-3f6cc809233e', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 964.536830] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 964.536955] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 964.537554] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d1cd31b6-e66b-4e8b-9d01-2d516ecd98d4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.560475] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 964.560475] env[68217]: value = "task-2961659" [ 964.560475] env[68217]: _type = "Task" [ 964.560475] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.569853] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961659, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.580646] env[68217]: DEBUG oslo_vmware.api [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961654, 'name': PowerOffVM_Task, 'duration_secs': 0.224334} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.580856] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 964.580974] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 964.581244] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e89ac33a-1d17-4418-ad4f-9e4a130e72cb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.737314] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 964.737546] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 964.737743] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Deleting the datastore file [datastore1] 1f99ace3-1c5b-46ce-bb9c-74e139519da7 {{(pid=68217) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 964.741036] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d1d676a-3d0d-4fcd-8a44-fa954b193db3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.746919] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e5f942ff-a0cc-4254-b6e8-a0959b75310c tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "d14026b1-84dd-430e-be94-94dcb1f47473" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 26.437s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.748523] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "d14026b1-84dd-430e-be94-94dcb1f47473" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 3.615s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.748771] env[68217]: INFO nova.compute.manager [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Unshelving [ 964.756607] env[68217]: DEBUG oslo_vmware.api [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 964.756607] env[68217]: value = "task-2961661" [ 964.756607] env[68217]: _type = "Task" [ 964.756607] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 964.776028] env[68217]: DEBUG oslo_vmware.api [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961661, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.862806] env[68217]: DEBUG oslo_concurrency.lockutils [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "4a555172-a2a3-410b-a0fe-38964cee9a22" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.865718] env[68217]: DEBUG oslo_concurrency.lockutils [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "4a555172-a2a3-410b-a0fe-38964cee9a22" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.865718] env[68217]: DEBUG oslo_concurrency.lockutils [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "4a555172-a2a3-410b-a0fe-38964cee9a22-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 964.865718] env[68217]: DEBUG oslo_concurrency.lockutils [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "4a555172-a2a3-410b-a0fe-38964cee9a22-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 964.865718] env[68217]: DEBUG oslo_concurrency.lockutils [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "4a555172-a2a3-410b-a0fe-38964cee9a22-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 964.867376] env[68217]: INFO nova.compute.manager [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Terminating instance [ 964.916054] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961657, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 964.920180] env[68217]: DEBUG oslo_vmware.api [None req-3643a3e3-f438-42da-b02a-893eb96c1353 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961658, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.008435] env[68217]: DEBUG nova.compute.manager [req-467594be-ae8a-4f69-8bff-1075beea64dd req-da26b3ac-d4b7-45af-b7ba-3e3e23fbfd90 service nova] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Received event network-changed-87cee679-c308-4c4f-b1b9-3f6cc809233e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 965.008435] env[68217]: DEBUG nova.compute.manager [req-467594be-ae8a-4f69-8bff-1075beea64dd req-da26b3ac-d4b7-45af-b7ba-3e3e23fbfd90 service nova] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Refreshing instance network info cache due to event network-changed-87cee679-c308-4c4f-b1b9-3f6cc809233e. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 965.008435] env[68217]: DEBUG oslo_concurrency.lockutils [req-467594be-ae8a-4f69-8bff-1075beea64dd req-da26b3ac-d4b7-45af-b7ba-3e3e23fbfd90 service nova] Acquiring lock "refresh_cache-01f97d0d-df21-441c-9dc6-5c51e3798d81" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.008435] env[68217]: DEBUG oslo_concurrency.lockutils [req-467594be-ae8a-4f69-8bff-1075beea64dd req-da26b3ac-d4b7-45af-b7ba-3e3e23fbfd90 service nova] Acquired lock "refresh_cache-01f97d0d-df21-441c-9dc6-5c51e3798d81" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.008435] env[68217]: DEBUG nova.network.neutron [req-467594be-ae8a-4f69-8bff-1075beea64dd req-da26b3ac-d4b7-45af-b7ba-3e3e23fbfd90 service nova] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Refreshing network info cache for port 87cee679-c308-4c4f-b1b9-3f6cc809233e {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 965.046160] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee97d9bb-3669-4889-81b6-9a5b58b96eaa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.055649] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a8e76b0-7d45-450f-8c20-8f67faa6f586 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.098708] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bdff1b2-260a-4305-98c9-0b9941f82dc1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.105880] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961659, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.111224] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2231421a-f96a-40c7-b1b0-3e8091f280e4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.126419] env[68217]: DEBUG nova.compute.provider_tree [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.270640] env[68217]: DEBUG oslo_vmware.api [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961661, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193338} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.270971] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 965.271566] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 965.271566] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 965.271566] env[68217]: INFO nova.compute.manager [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Took 1.22 seconds to destroy the instance on the hypervisor. [ 965.271750] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 965.272540] env[68217]: DEBUG nova.compute.manager [-] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 965.272540] env[68217]: DEBUG nova.network.neutron [-] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 965.377172] env[68217]: DEBUG nova.compute.manager [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 965.377274] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 965.378344] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889732bf-3df3-4cf2-8516-d2e4a2cbfe76 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.386469] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 965.386742] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3f2b2f56-a34d-4a1d-96c3-1c50320fd579 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.392559] env[68217]: DEBUG oslo_vmware.api [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 965.392559] env[68217]: value = "task-2961662" [ 965.392559] env[68217]: _type = "Task" [ 965.392559] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.400386] env[68217]: DEBUG oslo_vmware.api [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961662, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.409448] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961657, 'name': CreateVM_Task, 'duration_secs': 0.796547} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.412889] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 965.413746] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.413912] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.414291] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 965.414968] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ada9152c-e038-4ba3-ad0d-2be2f56085d0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.420450] env[68217]: DEBUG oslo_vmware.api [None req-3643a3e3-f438-42da-b02a-893eb96c1353 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961658, 'name': PowerOnVM_Task, 'duration_secs': 0.526764} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.421271] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3643a3e3-f438-42da-b02a-893eb96c1353 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 965.421546] env[68217]: DEBUG nova.compute.manager [None req-3643a3e3-f438-42da-b02a-893eb96c1353 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 965.422502] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9bc3baf-4d42-44f4-85e1-f1eeaf36f21f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.428216] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 965.428216] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c8456c-c7f7-4ecd-a66e-05849b020a15" [ 965.428216] env[68217]: _type = "Task" [ 965.428216] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.440285] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c8456c-c7f7-4ecd-a66e-05849b020a15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.477113] env[68217]: DEBUG nova.network.neutron [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Successfully updated port: 1b0e0705-6a81-45e3-b5b0-832547fed562 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 965.574581] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961659, 'name': CreateVM_Task, 'duration_secs': 0.634624} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.574898] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 965.575560] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.590491] env[68217]: DEBUG nova.compute.manager [req-9130c7de-eb23-478d-82b8-cb80276f34b0 req-1daa360f-8293-480f-957d-e61ae2751293 service nova] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Received event network-vif-plugged-1b0e0705-6a81-45e3-b5b0-832547fed562 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 965.590753] env[68217]: DEBUG oslo_concurrency.lockutils [req-9130c7de-eb23-478d-82b8-cb80276f34b0 req-1daa360f-8293-480f-957d-e61ae2751293 service nova] Acquiring lock "105e6181-19c4-466b-88a0-cdbca2cac230-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 965.590905] env[68217]: DEBUG oslo_concurrency.lockutils [req-9130c7de-eb23-478d-82b8-cb80276f34b0 req-1daa360f-8293-480f-957d-e61ae2751293 service nova] Lock "105e6181-19c4-466b-88a0-cdbca2cac230-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 965.591082] env[68217]: DEBUG oslo_concurrency.lockutils [req-9130c7de-eb23-478d-82b8-cb80276f34b0 req-1daa360f-8293-480f-957d-e61ae2751293 service nova] Lock "105e6181-19c4-466b-88a0-cdbca2cac230-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 965.591250] env[68217]: DEBUG nova.compute.manager [req-9130c7de-eb23-478d-82b8-cb80276f34b0 req-1daa360f-8293-480f-957d-e61ae2751293 service nova] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] No waiting events found dispatching network-vif-plugged-1b0e0705-6a81-45e3-b5b0-832547fed562 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 965.591428] env[68217]: WARNING nova.compute.manager [req-9130c7de-eb23-478d-82b8-cb80276f34b0 req-1daa360f-8293-480f-957d-e61ae2751293 service nova] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Received unexpected event network-vif-plugged-1b0e0705-6a81-45e3-b5b0-832547fed562 for instance with vm_state building and task_state spawning. 
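The WARNING record just above ("No waiting events found dispatching network-vif-plugged-..." followed by "Received unexpected event ... for instance with vm_state building and task_state spawning") reflects a pop-or-warn pattern: an incoming external event either wakes a waiter that a workflow registered earlier, or, if nothing registered, it is only logged. The following is a minimal editor's sketch of that pattern, not Nova's actual implementation; the names SimpleInstanceEvents and handle_external_event are hypothetical.

# Illustrative sketch of the pop-or-warn event dispatch seen in the log above.
import threading
from collections import defaultdict

class SimpleInstanceEvents:
    """Tracks per-instance events that some workflow has registered interest in."""
    def __init__(self):
        self._lock = threading.Lock()
        # instance_uuid -> {event_name: threading.Event}
        self._waiters = defaultdict(dict)

    def prepare_for_event(self, instance_uuid, event_name):
        # A spawn/plug workflow registers the event it intends to wait on.
        waiter = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def pop_instance_event(self, instance_uuid, event_name):
        # Called when an external event arrives; returns the waiter if one exists.
        with self._lock:
            return self._waiters[instance_uuid].pop(event_name, None)

def handle_external_event(events, instance_uuid, event_name):
    waiter = events.pop_instance_event(instance_uuid, event_name)
    if waiter is None:
        # Corresponds to the WARNING in the log: no workflow was waiting yet.
        print(f"WARNING: unexpected event {event_name} for instance {instance_uuid}")
    else:
        # Corresponds to the normal case: wake the blocked workflow.
        waiter.set()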
[ 965.591563] env[68217]: DEBUG nova.compute.manager [req-9130c7de-eb23-478d-82b8-cb80276f34b0 req-1daa360f-8293-480f-957d-e61ae2751293 service nova] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Received event network-changed-1b0e0705-6a81-45e3-b5b0-832547fed562 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 965.591697] env[68217]: DEBUG nova.compute.manager [req-9130c7de-eb23-478d-82b8-cb80276f34b0 req-1daa360f-8293-480f-957d-e61ae2751293 service nova] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Refreshing instance network info cache due to event network-changed-1b0e0705-6a81-45e3-b5b0-832547fed562. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 965.591892] env[68217]: DEBUG oslo_concurrency.lockutils [req-9130c7de-eb23-478d-82b8-cb80276f34b0 req-1daa360f-8293-480f-957d-e61ae2751293 service nova] Acquiring lock "refresh_cache-105e6181-19c4-466b-88a0-cdbca2cac230" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.592024] env[68217]: DEBUG oslo_concurrency.lockutils [req-9130c7de-eb23-478d-82b8-cb80276f34b0 req-1daa360f-8293-480f-957d-e61ae2751293 service nova] Acquired lock "refresh_cache-105e6181-19c4-466b-88a0-cdbca2cac230" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.592179] env[68217]: DEBUG nova.network.neutron [req-9130c7de-eb23-478d-82b8-cb80276f34b0 req-1daa360f-8293-480f-957d-e61ae2751293 service nova] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Refreshing network info cache for port 1b0e0705-6a81-45e3-b5b0-832547fed562 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 965.629877] env[68217]: DEBUG nova.scheduler.client.report [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 965.768675] env[68217]: DEBUG nova.compute.utils [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 965.901973] env[68217]: DEBUG oslo_vmware.api [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961662, 'name': PowerOffVM_Task, 'duration_secs': 0.403779} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.902464] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.902639] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 965.902908] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-019991b9-b15e-4d4f-9d58-ecb70df40d57 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.940907] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c8456c-c7f7-4ecd-a66e-05849b020a15, 'name': SearchDatastore_Task, 'duration_secs': 0.01568} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.942126] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 965.942327] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 965.942563] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.942710] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.942886] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 965.943209] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 965.943538] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 965.943738] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c01e3ebc-b422-4292-8068-2f19f75b7fbd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.949146] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-773f6c73-5054-4483-a647-af5a1a96f761 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.951587] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 965.951587] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]521eda50-a962-099a-6e19-e8733582d0ee" [ 965.951587] env[68217]: _type = "Task" [ 965.951587] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.956771] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 965.956874] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 965.957936] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-52bcbcb8-d896-495d-abc5-36704f70e484 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.965074] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521eda50-a962-099a-6e19-e8733582d0ee, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.968194] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 965.968194] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]521919f2-17c6-01dc-2d4d-b52c610a048e" [ 965.968194] env[68217]: _type = "Task" [ 965.968194] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.974321] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 965.974321] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 965.974321] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Deleting the datastore file [datastore2] 4a555172-a2a3-410b-a0fe-38964cee9a22 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 965.974497] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-932a121a-e63d-4877-a2f8-f80aeb53841e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.983914] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "refresh_cache-105e6181-19c4-466b-88a0-cdbca2cac230" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.984178] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521919f2-17c6-01dc-2d4d-b52c610a048e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.989066] env[68217]: DEBUG oslo_vmware.api [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for the task: (returnval){ [ 965.989066] env[68217]: value = "task-2961664" [ 965.989066] env[68217]: _type = "Task" [ 965.989066] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.999210] env[68217]: DEBUG oslo_vmware.api [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961664, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.057459] env[68217]: DEBUG nova.network.neutron [req-467594be-ae8a-4f69-8bff-1075beea64dd req-da26b3ac-d4b7-45af-b7ba-3e3e23fbfd90 service nova] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Updated VIF entry in instance network info cache for port 87cee679-c308-4c4f-b1b9-3f6cc809233e. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 966.058057] env[68217]: DEBUG nova.network.neutron [req-467594be-ae8a-4f69-8bff-1075beea64dd req-da26b3ac-d4b7-45af-b7ba-3e3e23fbfd90 service nova] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Updating instance_info_cache with network_info: [{"id": "87cee679-c308-4c4f-b1b9-3f6cc809233e", "address": "fa:16:3e:f4:1f:d6", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap87cee679-c3", "ovs_interfaceid": "87cee679-c308-4c4f-b1b9-3f6cc809233e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.156998] env[68217]: DEBUG nova.network.neutron [req-9130c7de-eb23-478d-82b8-cb80276f34b0 req-1daa360f-8293-480f-957d-e61ae2751293 service nova] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 966.274171] env[68217]: INFO nova.virt.block_device [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Booting with volume 3e056b4f-fea1-46b1-b841-37e7f391cb46 at /dev/sdb [ 966.314262] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2300e913-1c42-4b9e-b4fe-a93030c5998d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.325483] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a775a35-1e8d-4237-9341-d97414e6f857 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.359960] env[68217]: DEBUG nova.network.neutron [req-9130c7de-eb23-478d-82b8-cb80276f34b0 req-1daa360f-8293-480f-957d-e61ae2751293 service nova] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 966.361165] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-619f6ef1-08a2-4a65-9808-7e1406cd970d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.369989] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-505e3907-150b-4fae-ac0f-066fbb7eb054 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.402481] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af9f246e-99b1-4e57-abb5-3239c290c6f8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.409316] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c5d5e6f-1ce7-4504-b575-59a7f7a80f2b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.422601] env[68217]: DEBUG nova.virt.block_device [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Updating existing volume attachment record: 2375c380-53a2-44f9-a223-cec76f9ea6c8 {{(pid=68217) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 966.462092] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521eda50-a962-099a-6e19-e8733582d0ee, 'name': SearchDatastore_Task, 'duration_secs': 0.020007} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.462569] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.462908] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 966.463261] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 966.482742] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521919f2-17c6-01dc-2d4d-b52c610a048e, 'name': SearchDatastore_Task, 'duration_secs': 0.014654} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.484341] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26d09431-f350-41e9-b583-dd0bafba8474 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.492019] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 966.492019] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f5cbb5-535e-1a94-5006-4ceb8813b71a" [ 966.492019] env[68217]: _type = "Task" [ 966.492019] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.507318] env[68217]: DEBUG oslo_vmware.api [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Task: {'id': task-2961664, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199295} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.507318] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 966.507318] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 966.507318] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 966.507594] env[68217]: INFO nova.compute.manager [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Took 1.13 seconds to destroy the instance on the hypervisor. [ 966.507972] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 966.508401] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f5cbb5-535e-1a94-5006-4ceb8813b71a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.508731] env[68217]: DEBUG nova.compute.manager [-] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 966.508934] env[68217]: DEBUG nova.network.neutron [-] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 966.563087] env[68217]: DEBUG oslo_concurrency.lockutils [req-467594be-ae8a-4f69-8bff-1075beea64dd req-da26b3ac-d4b7-45af-b7ba-3e3e23fbfd90 service nova] Releasing lock "refresh_cache-01f97d0d-df21-441c-9dc6-5c51e3798d81" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.644031] env[68217]: DEBUG oslo_concurrency.lockutils [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.405s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 966.645633] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.746s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 966.646107] env[68217]: DEBUG nova.objects.instance [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lazy-loading 'resources' on Instance uuid ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 966.866828] env[68217]: DEBUG oslo_concurrency.lockutils [req-9130c7de-eb23-478d-82b8-cb80276f34b0 req-1daa360f-8293-480f-957d-e61ae2751293 service nova] Releasing lock "refresh_cache-105e6181-19c4-466b-88a0-cdbca2cac230" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 966.866828] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "refresh_cache-105e6181-19c4-466b-88a0-cdbca2cac230" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 966.866828] env[68217]: DEBUG nova.network.neutron [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 967.012803] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f5cbb5-535e-1a94-5006-4ceb8813b71a, 'name': SearchDatastore_Task, 'duration_secs': 0.017472} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.013429] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.013837] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 776798bf-1ad4-4acb-ac58-cacc5493e1c7/776798bf-1ad4-4acb-ac58-cacc5493e1c7.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 967.014307] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 967.014628] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 967.014974] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-876f5b2f-8c66-4aac-af07-ccc1e3a53bec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.017515] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2870ef18-9fb3-4869-9953-278b1c74ce6f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.027095] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 967.027095] env[68217]: value = "task-2961668" [ 967.027095] env[68217]: _type = "Task" [ 967.027095] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.029337] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 967.029679] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 967.034270] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4fa2b20-2815-4442-b858-1e7155bd0105 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.045194] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961668, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.045904] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 967.045904] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]525b40ce-1805-f1e8-7331-bcab9ec5f066" [ 967.045904] env[68217]: _type = "Task" [ 967.045904] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.055916] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]525b40ce-1805-f1e8-7331-bcab9ec5f066, 'name': SearchDatastore_Task, 'duration_secs': 0.008595} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.056918] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-760a9890-1ad6-43bb-9e30-d11914e5f3cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.064035] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 967.064035] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5297cdfd-f142-cb06-d074-1605ccd79ef0" [ 967.064035] env[68217]: _type = "Task" [ 967.064035] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.074062] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5297cdfd-f142-cb06-d074-1605ccd79ef0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.213449] env[68217]: INFO nova.scheduler.client.report [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleted allocation for migration 491f93e8-1987-44d6-adf9-567206333bb4 [ 967.442736] env[68217]: DEBUG nova.network.neutron [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 967.454180] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d925d52-6955-436c-b8fd-ed46000e39fb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.468909] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2300ee32-038e-40dd-b30e-88ac22e2ca77 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.516361] env[68217]: DEBUG nova.network.neutron [-] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.521471] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f2044d9-d9ff-436f-a283-e8167e18c7f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.535817] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52151d5a-d4fa-4f7c-9fdc-d1c2faa7b523 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.546784] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961668, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.555472] env[68217]: DEBUG nova.compute.provider_tree [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 967.577313] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5297cdfd-f142-cb06-d074-1605ccd79ef0, 'name': SearchDatastore_Task, 'duration_secs': 0.009506} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 967.577738] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 967.578164] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 01f97d0d-df21-441c-9dc6-5c51e3798d81/01f97d0d-df21-441c-9dc6-5c51e3798d81.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 967.578563] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b5060100-78c2-4b7b-b7df-f85cd758e4e3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.587832] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 967.587832] env[68217]: value = "task-2961669" [ 967.587832] env[68217]: _type = "Task" [ 967.587832] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.599713] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961669, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.695991] env[68217]: DEBUG nova.compute.manager [req-c151480b-400a-4813-a02a-13919d8a13fa req-2f1228d4-1f14-45b3-8840-6f68b1988d00 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Received event network-vif-deleted-f42ea21b-6ba7-4a8c-846e-852bfff623e1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 967.699296] env[68217]: DEBUG nova.compute.manager [req-c151480b-400a-4813-a02a-13919d8a13fa req-2f1228d4-1f14-45b3-8840-6f68b1988d00 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Received event network-vif-deleted-4e400891-4a77-45f0-a314-6963b2ea95f0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 967.699296] env[68217]: DEBUG nova.compute.manager [req-c151480b-400a-4813-a02a-13919d8a13fa req-2f1228d4-1f14-45b3-8840-6f68b1988d00 service nova] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Received event network-vif-deleted-c5a3c187-8daf-4f2c-b103-d26c4f4b1792 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 967.699296] env[68217]: DEBUG nova.compute.manager [req-c151480b-400a-4813-a02a-13919d8a13fa req-2f1228d4-1f14-45b3-8840-6f68b1988d00 service nova] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Received event network-vif-deleted-13f84539-5bd2-4d90-9636-4109e055cb5e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 967.699296] env[68217]: INFO nova.compute.manager [req-c151480b-400a-4813-a02a-13919d8a13fa req-2f1228d4-1f14-45b3-8840-6f68b1988d00 service nova] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Neutron deleted interface 13f84539-5bd2-4d90-9636-4109e055cb5e; detaching it from the instance and deleting it from the info cache [ 967.699296] env[68217]: DEBUG nova.network.neutron [req-c151480b-400a-4813-a02a-13919d8a13fa req-2f1228d4-1f14-45b3-8840-6f68b1988d00 service nova] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.724412] env[68217]: DEBUG oslo_concurrency.lockutils [None req-08c467a2-0130-43fe-b6ec-61333baf1549 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "35817c87-0c55-49bd-917a-59bd39de663c" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 12.745s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 967.739067] env[68217]: DEBUG nova.network.neutron [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Updating instance_info_cache with network_info: [{"id": "1b0e0705-6a81-45e3-b5b0-832547fed562", "address": "fa:16:3e:a7:79:2a", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": 
"0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b0e0705-6a", "ovs_interfaceid": "1b0e0705-6a81-45e3-b5b0-832547fed562", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.754880] env[68217]: DEBUG nova.network.neutron [-] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 968.026682] env[68217]: INFO nova.compute.manager [-] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Took 2.75 seconds to deallocate network for instance. [ 968.042878] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961668, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.698996} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.043227] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 776798bf-1ad4-4acb-ac58-cacc5493e1c7/776798bf-1ad4-4acb-ac58-cacc5493e1c7.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 968.044024] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 968.044024] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-149769f6-5047-4006-a102-251617f5fc43 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.051193] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 968.051193] env[68217]: value = "task-2961670" [ 968.051193] env[68217]: _type = "Task" [ 968.051193] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.064182] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961670, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.082909] env[68217]: ERROR nova.scheduler.client.report [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [req-063df30a-3bf0-4e89-b5d9-339c1d5caceb] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-063df30a-3bf0-4e89-b5d9-339c1d5caceb"}]} [ 968.099111] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961669, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.109435] env[68217]: DEBUG nova.scheduler.client.report [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 968.128275] env[68217]: DEBUG nova.scheduler.client.report [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 968.128543] env[68217]: DEBUG nova.compute.provider_tree [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 968.140070] env[68217]: DEBUG nova.scheduler.client.report [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Refreshing aggregate associations 
for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 968.142243] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "35817c87-0c55-49bd-917a-59bd39de663c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.142467] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "35817c87-0c55-49bd-917a-59bd39de663c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.142664] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "35817c87-0c55-49bd-917a-59bd39de663c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.142842] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "35817c87-0c55-49bd-917a-59bd39de663c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 968.143009] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "35817c87-0c55-49bd-917a-59bd39de663c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 968.145070] env[68217]: INFO nova.compute.manager [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Terminating instance [ 968.164987] env[68217]: DEBUG nova.scheduler.client.report [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 968.204838] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0822e805-c143-49c7-895a-a1737e8be77b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
968.222457] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7f1a58d-49bd-49c2-a85b-1c4940576ae5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.241114] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "refresh_cache-105e6181-19c4-466b-88a0-cdbca2cac230" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 968.241374] env[68217]: DEBUG nova.compute.manager [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Instance network_info: |[{"id": "1b0e0705-6a81-45e3-b5b0-832547fed562", "address": "fa:16:3e:a7:79:2a", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1b0e0705-6a", "ovs_interfaceid": "1b0e0705-6a81-45e3-b5b0-832547fed562", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 968.241822] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a7:79:2a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1b0e0705-6a81-45e3-b5b0-832547fed562', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 968.250517] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 968.266675] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 968.267927] env[68217]: INFO nova.compute.manager [-] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Took 1.76 seconds to deallocate network for instance. [ 968.268399] env[68217]: DEBUG nova.compute.manager [req-c151480b-400a-4813-a02a-13919d8a13fa req-2f1228d4-1f14-45b3-8840-6f68b1988d00 service nova] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Detach interface failed, port_id=13f84539-5bd2-4d90-9636-4109e055cb5e, reason: Instance 4a555172-a2a3-410b-a0fe-38964cee9a22 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 968.269261] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89688cb3-f387-4b1f-8750-ac6d9ca30549 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.296514] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 968.296514] env[68217]: value = "task-2961671" [ 968.296514] env[68217]: _type = "Task" [ 968.296514] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.304736] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961671, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.451026] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbd8658b-84b3-448e-a15e-3847b7ad5971 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.458279] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5e49e4a-b51e-48c6-9d0c-3b66b75bfbd2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.487544] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-577c5c2f-4f62-47b3-8a42-a01197d75f3e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.494696] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4889365-1f58-47e8-bcd0-3be0730bf8a9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.510541] env[68217]: DEBUG nova.compute.provider_tree [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 968.537418] env[68217]: DEBUG oslo_concurrency.lockutils [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.560494] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961670, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080158} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.560794] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 968.561628] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-113ff821-7545-4f1d-ae58-adecbfd3ee1c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.584489] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] 776798bf-1ad4-4acb-ac58-cacc5493e1c7/776798bf-1ad4-4acb-ac58-cacc5493e1c7.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 968.584489] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ed2b36a4-00f2-4dcd-8359-6dfb7e480167 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.607560] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961669, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.543578} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 968.611089] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 01f97d0d-df21-441c-9dc6-5c51e3798d81/01f97d0d-df21-441c-9dc6-5c51e3798d81.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 968.611089] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 968.611089] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 968.611089] env[68217]: value = "task-2961672" [ 968.611089] env[68217]: _type = "Task" [ 968.611089] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.611089] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a240a0b2-f09a-464a-8e6f-05f12bb2eeef {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.618971] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961672, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.620552] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 968.620552] env[68217]: value = "task-2961673" [ 968.620552] env[68217]: _type = "Task" [ 968.620552] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.651125] env[68217]: DEBUG nova.compute.manager [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 968.651125] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 968.651451] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb932be9-78d6-4c4f-b151-8a9b43579db3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.659268] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 968.660141] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d893cc82-43da-4a72-b75c-c511ac24b58e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.666330] env[68217]: DEBUG oslo_vmware.api [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 968.666330] env[68217]: value = "task-2961674" [ 968.666330] env[68217]: _type = "Task" [ 968.666330] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.674511] env[68217]: DEBUG oslo_vmware.api [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961674, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.794042] env[68217]: DEBUG oslo_concurrency.lockutils [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.808314] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961671, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.812584] env[68217]: DEBUG oslo_concurrency.lockutils [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "92f99a45-13a3-48d9-8dbc-4065cc8ee9dc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 968.812841] env[68217]: DEBUG oslo_concurrency.lockutils [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "92f99a45-13a3-48d9-8dbc-4065cc8ee9dc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.013800] env[68217]: DEBUG nova.scheduler.client.report [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 969.122550] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961672, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.130231] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961673, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072242} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.130444] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 969.131213] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a1c268f-d8c6-4ea9-9fba-a94438d0537b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.154512] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 01f97d0d-df21-441c-9dc6-5c51e3798d81/01f97d0d-df21-441c-9dc6-5c51e3798d81.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 969.154840] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-225b083b-0949-4244-adbf-72ea68bba258 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.180722] env[68217]: DEBUG oslo_vmware.api [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961674, 'name': PowerOffVM_Task, 'duration_secs': 0.288141} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.181447] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 969.181618] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 969.181958] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 969.181958] env[68217]: value = "task-2961676" [ 969.181958] env[68217]: _type = "Task" [ 969.181958] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.182166] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce8d12ff-3cf2-4609-9c48-89280e457da8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.191423] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961676, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.266964] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 969.266964] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 969.266964] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleting the datastore file [datastore1] 35817c87-0c55-49bd-917a-59bd39de663c {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 969.266964] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a839bdcb-af40-4b7c-9adf-c0a53e757c69 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.274773] env[68217]: DEBUG oslo_vmware.api [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 969.274773] env[68217]: value = "task-2961678" [ 969.274773] env[68217]: _type = "Task" [ 969.274773] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.282794] env[68217]: DEBUG oslo_vmware.api [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961678, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.306824] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961671, 'name': CreateVM_Task, 'duration_secs': 0.621334} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.306998] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 969.307704] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.307898] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.308222] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 969.308473] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f892bf7-8491-4fba-83ec-bb4f7f3bfaaa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.312982] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 969.312982] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52205a0a-4351-2946-dc79-f22f945c086e" [ 969.312982] env[68217]: _type = "Task" [ 969.312982] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.316399] env[68217]: DEBUG nova.compute.manager [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 969.324068] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52205a0a-4351-2946-dc79-f22f945c086e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.519379] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.874s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 969.522113] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.586s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 969.523772] env[68217]: INFO nova.compute.claims [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 969.544274] env[68217]: INFO nova.scheduler.client.report [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Deleted allocations for instance ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe [ 969.623535] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961672, 'name': ReconfigVM_Task, 'duration_secs': 0.698499} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.623535] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Reconfigured VM instance instance-00000057 to attach disk [datastore2] 776798bf-1ad4-4acb-ac58-cacc5493e1c7/776798bf-1ad4-4acb-ac58-cacc5493e1c7.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 969.623535] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d2047c20-eed5-4df9-9dd3-ef8fce030344 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.629201] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 969.629201] env[68217]: value = "task-2961679" [ 969.629201] env[68217]: _type = "Task" [ 969.629201] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.637445] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961679, 'name': Rename_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.697354] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961676, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.785224] env[68217]: DEBUG oslo_vmware.api [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961678, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.196931} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.785544] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 969.785759] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 969.785963] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 969.786158] env[68217]: INFO nova.compute.manager [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Took 1.14 seconds to destroy the instance on the hypervisor. [ 969.786397] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 969.786601] env[68217]: DEBUG nova.compute.manager [-] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 969.786709] env[68217]: DEBUG nova.network.neutron [-] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 969.833268] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52205a0a-4351-2946-dc79-f22f945c086e, 'name': SearchDatastore_Task, 'duration_secs': 0.017618} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.833838] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 969.834107] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 969.834341] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 969.834523] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 969.834672] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 969.835116] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-589c6689-9ed5-4b54-8865-bca9e705f599 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.844505] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 969.845532] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 969.847073] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe80d9bf-5f21-48c2-9b43-fb44bb632f9d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.851438] env[68217]: DEBUG oslo_concurrency.lockutils [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 969.854789] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 969.854789] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b221c3-7329-da4a-7bfa-2ea96aca63b1" [ 969.854789] env[68217]: _type = "Task" [ 969.854789] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.864194] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b221c3-7329-da4a-7bfa-2ea96aca63b1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.055248] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ef7c8d28-2c13-49ab-a71b-e8f940c221e8 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.820s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.139555] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961679, 'name': Rename_Task, 'duration_secs': 0.191161} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.140057] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 970.140422] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8fb988ee-994e-415e-bdb2-6c57086004b7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.148197] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 970.148197] env[68217]: value = "task-2961680" [ 970.148197] env[68217]: _type = "Task" [ 970.148197] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.159577] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961680, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.196507] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961676, 'name': ReconfigVM_Task, 'duration_secs': 0.71222} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.196815] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 01f97d0d-df21-441c-9dc6-5c51e3798d81/01f97d0d-df21-441c-9dc6-5c51e3798d81.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 970.197881] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa4987e5-aa51-46a0-a23f-8baae8bec7cc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.205281] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 970.205281] env[68217]: value = "task-2961681" [ 970.205281] env[68217]: _type = "Task" [ 970.205281] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.215143] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961681, 'name': Rename_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.248847] env[68217]: DEBUG nova.compute.manager [req-64b181e6-6231-4a37-a5af-e14438baa5d0 req-b3b2b958-b951-4246-8503-a61aa35fb73d service nova] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Received event network-vif-deleted-63b92bd9-8f95-481e-9ef4-468ea20dade1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 970.248847] env[68217]: INFO nova.compute.manager [req-64b181e6-6231-4a37-a5af-e14438baa5d0 req-b3b2b958-b951-4246-8503-a61aa35fb73d service nova] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Neutron deleted interface 63b92bd9-8f95-481e-9ef4-468ea20dade1; detaching it from the instance and deleting it from the info cache [ 970.248847] env[68217]: DEBUG nova.network.neutron [req-64b181e6-6231-4a37-a5af-e14438baa5d0 req-b3b2b958-b951-4246-8503-a61aa35fb73d service nova] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.365288] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b221c3-7329-da4a-7bfa-2ea96aca63b1, 'name': SearchDatastore_Task, 'duration_secs': 0.009362} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.368406] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ad76bfb-c7aa-41a0-913a-513792bcf71e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.375183] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 970.375183] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527dee22-93fe-c0fe-a520-2464a6a51532" [ 970.375183] env[68217]: _type = "Task" [ 970.375183] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.387699] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527dee22-93fe-c0fe-a520-2464a6a51532, 'name': SearchDatastore_Task, 'duration_secs': 0.009372} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.387952] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 970.388219] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 105e6181-19c4-466b-88a0-cdbca2cac230/105e6181-19c4-466b-88a0-cdbca2cac230.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 970.388468] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2329ab5-aae1-4681-9bb4-210331414815 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.400137] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 970.400137] env[68217]: value = "task-2961682" [ 970.400137] env[68217]: _type = "Task" [ 970.400137] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.409972] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961682, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.663715] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961680, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.718838] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961681, 'name': Rename_Task, 'duration_secs': 0.238265} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.719102] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 970.719363] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0a7b340f-152f-40a5-a0f0-02dab920e773 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.731024] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 970.731024] env[68217]: value = "task-2961683" [ 970.731024] env[68217]: _type = "Task" [ 970.731024] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.731588] env[68217]: DEBUG nova.network.neutron [-] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 970.740497] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961683, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.756902] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd24796c-601b-4250-aed1-2fe27f4696b9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.764804] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "aa4b9cc8-d0dc-4a0b-9eec-dceace695df9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.765169] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "aa4b9cc8-d0dc-4a0b-9eec-dceace695df9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.765298] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "aa4b9cc8-d0dc-4a0b-9eec-dceace695df9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 970.765687] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 
tempest-ServersAdminTestJSON-673344979-project-member] Lock "aa4b9cc8-d0dc-4a0b-9eec-dceace695df9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 970.765687] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "aa4b9cc8-d0dc-4a0b-9eec-dceace695df9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 970.770729] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b113a5e-1bab-4e58-a3f8-1d2754d896a9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.788437] env[68217]: INFO nova.compute.manager [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Terminating instance [ 970.817775] env[68217]: DEBUG nova.compute.manager [req-64b181e6-6231-4a37-a5af-e14438baa5d0 req-b3b2b958-b951-4246-8503-a61aa35fb73d service nova] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Detach interface failed, port_id=63b92bd9-8f95-481e-9ef4-468ea20dade1, reason: Instance 35817c87-0c55-49bd-917a-59bd39de663c could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 970.909685] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961682, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47964} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.911024] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 105e6181-19c4-466b-88a0-cdbca2cac230/105e6181-19c4-466b-88a0-cdbca2cac230.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 970.911141] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 970.912469] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eb6e018-9c76-4d1f-9ff2-4a48c5410184 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.914766] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1710c89-30bc-447d-b092-c7874f11485b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.921015] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-503f79f3-789f-448b-91ff-f224fb143952 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.925494] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 970.925494] env[68217]: value = "task-2961684" [ 970.925494] env[68217]: _type = "Task" [ 970.925494] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.960730] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af16c154-cb20-4d4c-92f1-d9d1ece27f82 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.966255] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961684, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.970556] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18bc46de-8950-47cb-a59e-7f40f5e0620a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.986693] env[68217]: DEBUG nova.compute.provider_tree [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 971.158417] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961680, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.234518] env[68217]: INFO nova.compute.manager [-] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Took 1.45 seconds to deallocate network for instance. [ 971.240183] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961683, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.296020] env[68217]: DEBUG nova.compute.manager [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 971.296263] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 971.297213] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846e153a-2bc9-4f06-aecd-d69c74de63fb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.306325] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 971.307048] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-768e7460-42fe-421a-a995-6debc98beeec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.313081] env[68217]: DEBUG oslo_vmware.api [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 971.313081] env[68217]: value = "task-2961685" [ 971.313081] env[68217]: _type = "Task" [ 971.313081] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.322363] env[68217]: DEBUG oslo_vmware.api [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961685, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.437593] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961684, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06281} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.437752] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 971.438589] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e734c2-fcbb-4ead-8d7e-d665ec84cf80 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.467425] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] 105e6181-19c4-466b-88a0-cdbca2cac230/105e6181-19c4-466b-88a0-cdbca2cac230.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 971.467827] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51547bb9-8978-4ddf-9987-1b807a69004c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.491363] env[68217]: DEBUG nova.scheduler.client.report [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 971.495435] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 971.495435] env[68217]: value = "task-2961686" [ 971.495435] env[68217]: _type = "Task" [ 971.495435] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.508581] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961686, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.662084] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961680, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.740601] env[68217]: DEBUG oslo_vmware.api [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961683, 'name': PowerOnVM_Task, 'duration_secs': 0.866414} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.740882] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 971.741240] env[68217]: INFO nova.compute.manager [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Took 10.31 seconds to spawn the instance on the hypervisor. [ 971.741324] env[68217]: DEBUG nova.compute.manager [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 971.742131] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-433ce698-5a9b-4775-9bbc-cd03daf7ee0d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.747098] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 971.824255] env[68217]: DEBUG oslo_vmware.api [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961685, 'name': PowerOffVM_Task, 'duration_secs': 0.25423} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.824793] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 971.825103] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 971.825455] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-32764c71-a20f-4ace-9fc2-bd5e58d0d87d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.891579] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 971.891986] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 971.891986] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Deleting the datastore file [datastore1] aa4b9cc8-d0dc-4a0b-9eec-dceace695df9 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 971.892209] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7dcee652-fe4d-46d8-ae3f-6168d749fec6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.899693] env[68217]: DEBUG oslo_vmware.api [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for the task: (returnval){ [ 971.899693] env[68217]: value = "task-2961688" [ 971.899693] env[68217]: _type = "Task" [ 971.899693] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.910942] env[68217]: DEBUG oslo_vmware.api [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961688, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.998879] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.477s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 971.999597] env[68217]: DEBUG nova.compute.manager [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 972.004511] env[68217]: DEBUG oslo_concurrency.lockutils [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.646s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.004511] env[68217]: DEBUG nova.objects.instance [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lazy-loading 'resources' on Instance uuid bd62c682-24f2-4559-887a-03186409f699 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 972.014238] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961686, 'name': ReconfigVM_Task, 'duration_secs': 0.514265} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.015206] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Reconfigured VM instance instance-00000059 to attach disk [datastore2] 105e6181-19c4-466b-88a0-cdbca2cac230/105e6181-19c4-466b-88a0-cdbca2cac230.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 972.017126] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ff570eb2-ff95-47c6-a7e4-dab22aeb8a0d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.026532] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.026532] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.027240] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 972.027240] env[68217]: value = "task-2961689" [ 972.027240] env[68217]: _type = "Task" [ 972.027240] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.037169] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961689, 'name': Rename_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.160826] env[68217]: DEBUG oslo_vmware.api [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961680, 'name': PowerOnVM_Task, 'duration_secs': 1.893688} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.161177] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 972.161395] env[68217]: INFO nova.compute.manager [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Took 13.37 seconds to spawn the instance on the hypervisor. [ 972.161571] env[68217]: DEBUG nova.compute.manager [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 972.162475] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eefa625-5ad2-4bae-9509-4d45aa5905f2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.269531] env[68217]: INFO nova.compute.manager [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Took 15.30 seconds to build instance. [ 972.410109] env[68217]: DEBUG oslo_vmware.api [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Task: {'id': task-2961688, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.183627} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.410388] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 972.410572] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 972.410750] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 972.410922] env[68217]: INFO nova.compute.manager [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Took 1.11 seconds to destroy the instance on the hypervisor. 
[ 972.411256] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 972.411388] env[68217]: DEBUG nova.compute.manager [-] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 972.411482] env[68217]: DEBUG nova.network.neutron [-] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 972.466057] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.466286] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 972.510282] env[68217]: DEBUG nova.compute.utils [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 972.519370] env[68217]: DEBUG nova.compute.manager [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 972.519370] env[68217]: DEBUG nova.network.neutron [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 972.531026] env[68217]: DEBUG nova.compute.manager [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 972.540537] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961689, 'name': Rename_Task, 'duration_secs': 0.28081} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.541265] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 972.541265] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-46addd5e-2ff6-4ad0-be65-2393fd97797c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.554626] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 972.554626] env[68217]: value = "task-2961690" [ 972.554626] env[68217]: _type = "Task" [ 972.554626] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.569021] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961690, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.569021] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 972.634518] env[68217]: DEBUG nova.policy [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9172578aec2742bb9aafc58752b926c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef7e30ed571740f3b3ea6b24fc9c6e20', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 972.685520] env[68217]: INFO nova.compute.manager [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Took 30.12 seconds to build instance. 
[ 972.744611] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c768908f-01b7-4a7c-bc23-c97397179a31 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.753439] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-dca89f7f-8328-4526-abe1-358c038aa0f4 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Suspending the VM {{(pid=68217) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 972.753700] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-ee120543-0a30-4880-9685-25a467a8a354 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.759297] env[68217]: DEBUG oslo_vmware.api [None req-dca89f7f-8328-4526-abe1-358c038aa0f4 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 972.759297] env[68217]: value = "task-2961691" [ 972.759297] env[68217]: _type = "Task" [ 972.759297] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.769543] env[68217]: DEBUG oslo_vmware.api [None req-dca89f7f-8328-4526-abe1-358c038aa0f4 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961691, 'name': SuspendVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.772277] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c184db8b-a4f5-4a00-ab44-b771d89e38cb tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "01f97d0d-df21-441c-9dc6-5c51e3798d81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.815s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 972.836310] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-891b2d01-2d1c-4977-89fb-24bb05677d74 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.846275] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170ea7ee-cae7-4f9c-bfe7-81559160e7b9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.879461] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2bfc92d-4c89-4299-a0b0-f927720f7edd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.888428] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bcb4020-1e7e-446a-9e57-bf6f081f75f6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.903031] env[68217]: DEBUG nova.compute.provider_tree [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 
with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 972.969248] env[68217]: DEBUG nova.compute.manager [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 973.022598] env[68217]: DEBUG nova.compute.manager [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 973.046816] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.065204] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961690, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.188625] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d926eebe-a2ef-4aee-aafe-baf6453434e7 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.628s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 973.274797] env[68217]: DEBUG oslo_vmware.api [None req-dca89f7f-8328-4526-abe1-358c038aa0f4 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961691, 'name': SuspendVM_Task} progress is 62%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.425373] env[68217]: ERROR nova.scheduler.client.report [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] [req-1b28c404-3701-424e-82ab-c6ed46b7d855] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1b28c404-3701-424e-82ab-c6ed46b7d855"}]} [ 973.446558] env[68217]: DEBUG nova.scheduler.client.report [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 973.476822] env[68217]: DEBUG nova.scheduler.client.report [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 973.477018] env[68217]: DEBUG nova.compute.provider_tree [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 973.493850] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 973.503505] env[68217]: DEBUG nova.scheduler.client.report [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 973.538847] env[68217]: DEBUG nova.compute.manager [req-40003dd0-1bb9-4468-9143-6bf4e9d55f6f req-590fa30e-d29e-48d8-ac37-616e4c92d27f service nova] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Received event network-vif-deleted-04299da0-d810-4014-b79f-1ac8a45e1a8f {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 973.540564] env[68217]: INFO nova.compute.manager [req-40003dd0-1bb9-4468-9143-6bf4e9d55f6f req-590fa30e-d29e-48d8-ac37-616e4c92d27f service nova] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Neutron deleted 
interface 04299da0-d810-4014-b79f-1ac8a45e1a8f; detaching it from the instance and deleting it from the info cache [ 973.540960] env[68217]: DEBUG nova.network.neutron [req-40003dd0-1bb9-4468-9143-6bf4e9d55f6f req-590fa30e-d29e-48d8-ac37-616e4c92d27f service nova] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.544585] env[68217]: DEBUG nova.scheduler.client.report [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 973.566986] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961690, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.577852] env[68217]: DEBUG nova.network.neutron [-] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.774762] env[68217]: DEBUG oslo_vmware.api [None req-dca89f7f-8328-4526-abe1-358c038aa0f4 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961691, 'name': SuspendVM_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.807951] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7128c4d-7858-4827-bfd1-4096b7485c3b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.813457] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0f0f06b-0b33-4579-8dff-e2adc622c70a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.853020] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626fe3b1-e900-4cba-ac92-41cb90279538 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.861271] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad9c74b-ff39-487d-be4f-fce20a72e0e2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 973.876620] env[68217]: DEBUG nova.compute.provider_tree [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 973.905662] env[68217]: DEBUG nova.network.neutron [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Successfully created port: 05c67562-5b0b-421a-a707-1d10d90f4a71 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 974.044855] env[68217]: DEBUG nova.compute.manager [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 974.047303] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b6946827-9519-464b-9958-0d09797ccaff {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.058293] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0239267c-6084-46c7-80b3-898459d22020 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.081802] env[68217]: INFO nova.compute.manager [-] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Took 1.67 seconds to deallocate network for instance. 
[ 974.084533] env[68217]: DEBUG nova.virt.hardware [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 974.084755] env[68217]: DEBUG nova.virt.hardware [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 974.084925] env[68217]: DEBUG nova.virt.hardware [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 974.085131] env[68217]: DEBUG nova.virt.hardware [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 974.085278] env[68217]: DEBUG nova.virt.hardware [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 974.085424] env[68217]: DEBUG nova.virt.hardware [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 974.085631] env[68217]: DEBUG nova.virt.hardware [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 974.085818] env[68217]: DEBUG nova.virt.hardware [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 974.085999] env[68217]: DEBUG nova.virt.hardware [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b 
tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 974.086289] env[68217]: DEBUG nova.virt.hardware [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 974.086472] env[68217]: DEBUG nova.virt.hardware [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 974.092559] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f33121-438d-46a3-8c3f-2b3b6e931821 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.108663] env[68217]: DEBUG nova.compute.manager [req-40003dd0-1bb9-4468-9143-6bf4e9d55f6f req-590fa30e-d29e-48d8-ac37-616e4c92d27f service nova] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Detach interface failed, port_id=04299da0-d810-4014-b79f-1ac8a45e1a8f, reason: Instance aa4b9cc8-d0dc-4a0b-9eec-dceace695df9 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 974.109094] env[68217]: DEBUG oslo_vmware.api [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961690, 'name': PowerOnVM_Task, 'duration_secs': 1.210441} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.109717] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 974.110094] env[68217]: INFO nova.compute.manager [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Took 10.25 seconds to spawn the instance on the hypervisor. 
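The nova.virt.hardware entries above walk the CPU-topology negotiation for the 1-vCPU m1.nano flavor: with no flavor or image limits set, the limits default to 65536 sockets/cores/threads, and the only factorization of 1 vCPU is cores=1, sockets=1, threads=1. A minimal sketch of that enumeration (a simplification, not Nova's actual implementation, which also honours preferred topologies and NUMA constraints):

    # Illustrative only: enumerate the (sockets, cores, threads) splits of a vCPU count.
    import collections
    import itertools

    VirtCPUTopology = collections.namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topos = []
        for s, c, t in itertools.product(range(1, vcpus + 1), repeat=3):
            if s * c * t == vcpus and s <= max_sockets and c <= max_cores and t <= max_threads:
                topos.append(VirtCPUTopology(s, c, t))
        return topos

    # For 1 vCPU this yields a single candidate, matching the log:
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)]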
[ 974.110094] env[68217]: DEBUG nova.compute.manager [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 974.110868] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c88187ee-ce3a-4af9-a194-7dccb88e9061 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.116601] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86c1fefb-8fdb-48bf-b445-718d1fc1220b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.273568] env[68217]: DEBUG oslo_vmware.api [None req-dca89f7f-8328-4526-abe1-358c038aa0f4 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961691, 'name': SuspendVM_Task, 'duration_secs': 1.066008} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 974.273894] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-dca89f7f-8328-4526-abe1-358c038aa0f4 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Suspended the VM {{(pid=68217) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 974.274074] env[68217]: DEBUG nova.compute.manager [None req-dca89f7f-8328-4526-abe1-358c038aa0f4 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 974.274830] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22d28b8d-cca0-439f-8664-fe64e01f7fb6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.409096] env[68217]: DEBUG nova.scheduler.client.report [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 123 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 974.409368] env[68217]: DEBUG nova.compute.provider_tree [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 123 to 124 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 974.409548] env[68217]: DEBUG nova.compute.provider_tree [None req-790ba93b-de51-460a-b7f0-7d1e46260311 
tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 974.593386] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 974.637565] env[68217]: INFO nova.compute.manager [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Took 17.50 seconds to build instance. [ 974.915132] env[68217]: DEBUG oslo_concurrency.lockutils [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.912s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 974.917624] env[68217]: DEBUG oslo_concurrency.lockutils [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.380s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 974.917778] env[68217]: DEBUG nova.objects.instance [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lazy-loading 'resources' on Instance uuid 1f99ace3-1c5b-46ce-bb9c-74e139519da7 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 974.937179] env[68217]: INFO nova.scheduler.client.report [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Deleted allocations for instance bd62c682-24f2-4559-887a-03186409f699 [ 975.139614] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1f3811aa-1c70-4849-a2fe-bc8596f5dfc4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "105e6181-19c4-466b-88a0-cdbca2cac230" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.017s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.445346] env[68217]: DEBUG oslo_concurrency.lockutils [None req-790ba93b-de51-460a-b7f0-7d1e46260311 tempest-MigrationsAdminTest-1132531208 tempest-MigrationsAdminTest-1132531208-project-member] Lock "bd62c682-24f2-4559-887a-03186409f699" "released" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.349s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.448162] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "01f97d0d-df21-441c-9dc6-5c51e3798d81" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.448449] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "01f97d0d-df21-441c-9dc6-5c51e3798d81" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.448706] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "01f97d0d-df21-441c-9dc6-5c51e3798d81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.449033] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "01f97d0d-df21-441c-9dc6-5c51e3798d81-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.449290] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "01f97d0d-df21-441c-9dc6-5c51e3798d81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 975.451749] env[68217]: INFO nova.compute.manager [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Terminating instance [ 975.510756] env[68217]: DEBUG nova.network.neutron [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Successfully updated port: 05c67562-5b0b-421a-a707-1d10d90f4a71 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 975.570842] env[68217]: DEBUG nova.compute.manager [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Received event network-changed-c9ba3459-1c52-46fa-b8b7-7f41a840a334 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 975.570842] env[68217]: DEBUG 
nova.compute.manager [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Refreshing instance network info cache due to event network-changed-c9ba3459-1c52-46fa-b8b7-7f41a840a334. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 975.570842] env[68217]: DEBUG oslo_concurrency.lockutils [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] Acquiring lock "refresh_cache-776798bf-1ad4-4acb-ac58-cacc5493e1c7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.570973] env[68217]: DEBUG oslo_concurrency.lockutils [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] Acquired lock "refresh_cache-776798bf-1ad4-4acb-ac58-cacc5493e1c7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 975.571194] env[68217]: DEBUG nova.network.neutron [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Refreshing network info cache for port c9ba3459-1c52-46fa-b8b7-7f41a840a334 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 975.684161] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f9298e2-f4fe-4052-916c-e98da07db7ff {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.692315] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3f76787-edb2-41a7-811c-d102cf531ebc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.722804] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-722b8f7e-c9e8-417e-933a-c7948f457450 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.730304] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0da4077d-0bdc-4131-843c-450e889e02a4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.743408] env[68217]: DEBUG nova.compute.provider_tree [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 975.863980] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "66ca9efd-1839-4e98-b006-5fc3adda375d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 975.864227] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "66ca9efd-1839-4e98-b006-5fc3adda375d" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 975.957248] env[68217]: DEBUG nova.compute.manager [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 975.957476] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 975.958370] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7abbc5f5-3066-43c3-8ec6-6ad1e35ee921 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.966240] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 975.966507] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0186be9f-6d95-4f47-97c0-70b4cb05ff52 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.016810] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 976.016964] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 976.017148] env[68217]: DEBUG nova.network.neutron [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 976.036014] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 976.036246] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 
01f97d0d-df21-441c-9dc6-5c51e3798d81] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 976.036431] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleting the datastore file [datastore2] 01f97d0d-df21-441c-9dc6-5c51e3798d81 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 976.036690] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-51a0d013-dd07-41f4-81b5-8f3b3b809753 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.043802] env[68217]: DEBUG oslo_vmware.api [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 976.043802] env[68217]: value = "task-2961693" [ 976.043802] env[68217]: _type = "Task" [ 976.043802] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.052028] env[68217]: DEBUG oslo_vmware.api [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961693, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.247985] env[68217]: DEBUG nova.scheduler.client.report [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 976.366777] env[68217]: DEBUG nova.compute.manager [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 976.413906] env[68217]: DEBUG nova.network.neutron [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Updated VIF entry in instance network info cache for port c9ba3459-1c52-46fa-b8b7-7f41a840a334. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 976.414284] env[68217]: DEBUG nova.network.neutron [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Updating instance_info_cache with network_info: [{"id": "c9ba3459-1c52-46fa-b8b7-7f41a840a334", "address": "fa:16:3e:a7:8a:0e", "network": {"id": "e7f56c12-ca87-40ad-b72d-955989c48237", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-417650994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.198", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3068bf39ee943f1bdf378f8b2a5c360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc9ba3459-1c", "ovs_interfaceid": "c9ba3459-1c52-46fa-b8b7-7f41a840a334", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.554082] env[68217]: DEBUG oslo_vmware.api [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961693, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.180784} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 976.554915] env[68217]: DEBUG nova.network.neutron [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 976.556881] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 976.557081] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 976.557259] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 976.557433] env[68217]: INFO nova.compute.manager [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Took 0.60 seconds to destroy the instance on the hypervisor. [ 976.557671] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 976.557872] env[68217]: DEBUG nova.compute.manager [-] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 976.557962] env[68217]: DEBUG nova.network.neutron [-] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 976.755113] env[68217]: DEBUG oslo_concurrency.lockutils [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.835s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 976.755641] env[68217]: DEBUG oslo_concurrency.lockutils [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.962s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.755988] env[68217]: DEBUG nova.objects.instance [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lazy-loading 'resources' on Instance uuid 4a555172-a2a3-410b-a0fe-38964cee9a22 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 976.781237] env[68217]: DEBUG nova.network.neutron [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Updating instance_info_cache with network_info: [{"id": "05c67562-5b0b-421a-a707-1d10d90f4a71", "address": "fa:16:3e:f2:62:b9", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05c67562-5b", "ovs_interfaceid": "05c67562-5b0b-421a-a707-1d10d90f4a71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.785314] env[68217]: INFO nova.scheduler.client.report [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Deleted allocations for instance 
1f99ace3-1c5b-46ce-bb9c-74e139519da7 [ 976.889422] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.916572] env[68217]: DEBUG oslo_concurrency.lockutils [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] Releasing lock "refresh_cache-776798bf-1ad4-4acb-ac58-cacc5493e1c7" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 976.916854] env[68217]: DEBUG nova.compute.manager [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Received event network-vif-plugged-05c67562-5b0b-421a-a707-1d10d90f4a71 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 976.917069] env[68217]: DEBUG oslo_concurrency.lockutils [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] Acquiring lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 976.917441] env[68217]: DEBUG oslo_concurrency.lockutils [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] Lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 976.917625] env[68217]: DEBUG oslo_concurrency.lockutils [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] Lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 976.917799] env[68217]: DEBUG nova.compute.manager [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] No waiting events found dispatching network-vif-plugged-05c67562-5b0b-421a-a707-1d10d90f4a71 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 976.917974] env[68217]: WARNING nova.compute.manager [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Received unexpected event network-vif-plugged-05c67562-5b0b-421a-a707-1d10d90f4a71 for instance with vm_state building and task_state spawning. 
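The recurring "Acquiring lock ... / Lock ... acquired ... waited Ns / Lock ... released ... held Ns" trios above are emitted by oslo.concurrency's lock wrappers. A minimal usage sketch, assuming placeholder function and lock names rather than Nova's actual code:

    # Illustrative only: the synchronized decorator serializes callers on a named lock
    # and logs the wait and hold times seen in the entries above.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # body runs with the 'compute_resources' lock held
        pass

    # Equivalent inline form with the context manager:
    # with lockutils.lock('refresh_cache-<instance-uuid>'):
    #     ... refresh the instance's network info cache ...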
[ 976.918146] env[68217]: DEBUG nova.compute.manager [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Received event network-changed-05c67562-5b0b-421a-a707-1d10d90f4a71 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 976.918301] env[68217]: DEBUG nova.compute.manager [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Refreshing instance network info cache due to event network-changed-05c67562-5b0b-421a-a707-1d10d90f4a71. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 976.918465] env[68217]: DEBUG oslo_concurrency.lockutils [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] Acquiring lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.287885] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 977.288239] env[68217]: DEBUG nova.compute.manager [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Instance network_info: |[{"id": "05c67562-5b0b-421a-a707-1d10d90f4a71", "address": "fa:16:3e:f2:62:b9", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05c67562-5b", "ovs_interfaceid": "05c67562-5b0b-421a-a707-1d10d90f4a71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 977.288529] env[68217]: DEBUG oslo_concurrency.lockutils [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] Acquired lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 977.288864] env[68217]: DEBUG nova.network.neutron [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Refreshing network info 
cache for port 05c67562-5b0b-421a-a707-1d10d90f4a71 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 977.290708] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:62:b9', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78340140-126f-4ef8-a340-debaa64da3e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '05c67562-5b0b-421a-a707-1d10d90f4a71', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 977.299019] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 977.309088] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 977.309088] env[68217]: DEBUG oslo_concurrency.lockutils [None req-17e233d7-e5dd-4bda-bcbc-3da7631e049d tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "1f99ace3-1c5b-46ce-bb9c-74e139519da7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.762s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 977.309555] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c2c575f-a819-42bc-be4a-47ae4c9be16a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.336885] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 977.336885] env[68217]: value = "task-2961694" [ 977.336885] env[68217]: _type = "Task" [ 977.336885] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.346250] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961694, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.382222] env[68217]: DEBUG nova.network.neutron [-] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 977.558030] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f32fbe19-b461-46a9-abc3-df076e71ece2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.568426] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0abfa0b4-73d9-4c2a-a2fc-8e4cb9cd5470 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.603614] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8666d7f0-6e7b-4167-bf37-5bd276860716 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.607974] env[68217]: DEBUG nova.compute.manager [req-29483f5e-34dc-42bc-8e94-062b7acf088f req-c8e6d280-0d81-486d-9460-f22d3ca888ee service nova] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Received event network-vif-deleted-87cee679-c308-4c4f-b1b9-3f6cc809233e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 977.613780] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a363a262-f23b-4763-a311-57fc24f40667 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.630229] env[68217]: DEBUG nova.compute.provider_tree [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 977.847898] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961694, 'name': CreateVM_Task, 'duration_secs': 0.407936} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.848266] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 977.849050] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.849255] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 977.849640] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 977.849920] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a574771-446f-4699-bab2-6e6ee2a3ca2e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.857273] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 977.857273] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52509d44-ce77-567f-b74e-6a4f123d7deb" [ 977.857273] env[68217]: _type = "Task" [ 977.857273] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.867705] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52509d44-ce77-567f-b74e-6a4f123d7deb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.886115] env[68217]: INFO nova.compute.manager [-] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Took 1.33 seconds to deallocate network for instance. [ 977.991316] env[68217]: DEBUG nova.network.neutron [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Updated VIF entry in instance network info cache for port 05c67562-5b0b-421a-a707-1d10d90f4a71. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 977.991316] env[68217]: DEBUG nova.network.neutron [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Updating instance_info_cache with network_info: [{"id": "05c67562-5b0b-421a-a707-1d10d90f4a71", "address": "fa:16:3e:f2:62:b9", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05c67562-5b", "ovs_interfaceid": "05c67562-5b0b-421a-a707-1d10d90f4a71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.134140] env[68217]: DEBUG nova.scheduler.client.report [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 978.368063] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52509d44-ce77-567f-b74e-6a4f123d7deb, 'name': SearchDatastore_Task, 'duration_secs': 0.010384} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.368469] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.368746] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 978.369011] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.369177] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 978.369350] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 978.369618] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ffb0f574-38a0-4bdb-b10b-b64b70fcb4d0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.379487] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 978.379692] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 978.380535] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6e8773a5-3bc8-4193-8028-739aeb35c0e6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.386398] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 978.386398] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b81771-13a8-b3c8-d463-195af4afde92" [ 978.386398] env[68217]: _type = "Task" [ 978.386398] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.394706] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 978.401042] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b81771-13a8-b3c8-d463-195af4afde92, 'name': SearchDatastore_Task, 'duration_secs': 0.008283} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.401804] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d885f052-7b07-4688-8bb1-2657051347a1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.408874] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 978.408874] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5296081f-536b-5963-71e1-3fd9e5d90bbf" [ 978.408874] env[68217]: _type = "Task" [ 978.408874] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.416575] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5296081f-536b-5963-71e1-3fd9e5d90bbf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 978.493739] env[68217]: DEBUG oslo_concurrency.lockutils [req-bff5debb-8b10-4b57-b33f-063f9c8a1103 req-1daa423f-65e7-426e-b1aa-8a4910242f90 service nova] Releasing lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.639472] env[68217]: DEBUG oslo_concurrency.lockutils [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.884s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 978.645167] env[68217]: DEBUG oslo_concurrency.lockutils [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.792s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 978.648489] env[68217]: INFO nova.compute.claims [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 978.692406] env[68217]: INFO nova.scheduler.client.report [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Deleted allocations for instance 4a555172-a2a3-410b-a0fe-38964cee9a22 [ 978.922611] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5296081f-536b-5963-71e1-3fd9e5d90bbf, 'name': SearchDatastore_Task, 'duration_secs': 0.009665} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 978.922940] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 978.923253] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 35c4ab95-fc14-4bd4-a2a5-64f15f070b88/35c4ab95-fc14-4bd4-a2a5-64f15f070b88.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 978.923855] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5d93ba5d-e71c-4e87-aaf0-ba718edf6dd7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.930121] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 978.930121] env[68217]: value = "task-2961695" [ 978.930121] env[68217]: _type = "Task" [ 978.930121] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 978.939655] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961695, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.201659] env[68217]: DEBUG oslo_concurrency.lockutils [None req-214f4719-d421-46d0-8993-1840ef1744a5 tempest-ImagesTestJSON-1122737556 tempest-ImagesTestJSON-1122737556-project-member] Lock "4a555172-a2a3-410b-a0fe-38964cee9a22" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 14.338s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 979.375548] env[68217]: DEBUG nova.compute.manager [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 979.376971] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1702f6-274c-4051-bc24-9618221def57 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.444766] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961695, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.457244} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.445296] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 35c4ab95-fc14-4bd4-a2a5-64f15f070b88/35c4ab95-fc14-4bd4-a2a5-64f15f070b88.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 979.445510] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 979.445843] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-78ea0bc6-658b-4bc0-bef1-78aac72df00d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.453225] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 979.453225] env[68217]: value = "task-2961696" [ 979.453225] env[68217]: _type = "Task" [ 979.453225] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 979.466485] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961696, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 979.886616] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5750f7fb-df1c-4b7b-83ff-db11598b741c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.891195] env[68217]: INFO nova.compute.manager [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] instance snapshotting [ 979.891445] env[68217]: DEBUG nova.objects.instance [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lazy-loading 'flavor' on Instance uuid 04149a5c-d1b5-4d71-a1ca-44696506a40d {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 979.898049] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc4d723-0aa5-4a3e-ab74-5526b2fee711 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.931077] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7882e83b-54d5-4630-97fa-e20b090e86fc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.942656] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d76007-2a00-443a-bb9e-48875986293d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.955999] env[68217]: DEBUG nova.compute.provider_tree [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 979.966561] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961696, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.05863} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 979.966873] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 979.967825] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7418cb68-e868-4774-b698-bbf77f2d1395 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.991759] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 35c4ab95-fc14-4bd4-a2a5-64f15f070b88/35c4ab95-fc14-4bd4-a2a5-64f15f070b88.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 979.992518] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f6a7a30-7b4a-4c5a-9ae0-aba27516f733 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.015100] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 980.015100] env[68217]: value = "task-2961697" [ 980.015100] env[68217]: _type = "Task" [ 980.015100] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.023773] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961697, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.028569] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Acquiring lock "d1fcac61-0d2a-4331-9042-af11c3c36ae4" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.028569] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Lock "d1fcac61-0d2a-4331-9042-af11c3c36ae4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.400022] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ef11041-a3de-4a2d-b0a8-5661e2a456e2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.419133] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0566f384-dd28-44ef-90ce-169990d884be {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.481606] env[68217]: ERROR nova.scheduler.client.report [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [req-2cd0c4ca-ca8f-46ac-9727-aaec045ec18b] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2cd0c4ca-ca8f-46ac-9727-aaec045ec18b"}]} [ 980.500020] env[68217]: DEBUG nova.scheduler.client.report [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 980.516017] env[68217]: DEBUG nova.scheduler.client.report [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 980.516017] env[68217]: DEBUG nova.compute.provider_tree [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 980.526480] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961697, 'name': ReconfigVM_Task, 'duration_secs': 0.351918} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 980.527484] env[68217]: DEBUG nova.scheduler.client.report [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 980.529763] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 35c4ab95-fc14-4bd4-a2a5-64f15f070b88/35c4ab95-fc14-4bd4-a2a5-64f15f070b88.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 980.530762] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b0815ee4-7828-48fe-ba55-a5728728812a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.533008] env[68217]: DEBUG nova.compute.manager [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 980.542071] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 980.542071] env[68217]: value = "task-2961698" [ 980.542071] env[68217]: _type = "Task" [ 980.542071] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.550181] env[68217]: DEBUG nova.scheduler.client.report [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 980.555722] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961698, 'name': Rename_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 980.708361] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquiring lock "34f176e7-f98e-4eda-aee9-45e44d5ffb85" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 980.708603] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "34f176e7-f98e-4eda-aee9-45e44d5ffb85" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 980.825140] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97a985bd-0976-4cc1-9faf-fb5768a23b2e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.832841] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e0d44b-6c9e-4897-92b9-cb52ffae0ec1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.866436] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78f76cc-094a-4326-b0ed-13cda5d2af88 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.873745] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe35351-17c6-426e-b97c-6f2fc5d72181 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.887362] env[68217]: DEBUG nova.compute.provider_tree [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 980.932539] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 980.933320] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-0c246649-b9a8-4534-bad3-4fe5f3f745d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.943671] env[68217]: DEBUG 
oslo_vmware.api [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 980.943671] env[68217]: value = "task-2961699" [ 980.943671] env[68217]: _type = "Task" [ 980.943671] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 980.951984] env[68217]: DEBUG oslo_vmware.api [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961699, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.105479] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961698, 'name': Rename_Task, 'duration_secs': 0.135598} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.105479] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 981.105479] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-309dbda5-656b-43e3-a7b2-ed051ae4988d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.105479] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.105479] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 981.105479] env[68217]: value = "task-2961700" [ 981.105479] env[68217]: _type = "Task" [ 981.105479] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.105479] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961700, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.211517] env[68217]: DEBUG nova.compute.manager [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 981.426040] env[68217]: DEBUG nova.scheduler.client.report [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 127 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 981.426040] env[68217]: DEBUG nova.compute.provider_tree [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 127 to 128 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 981.426268] env[68217]: DEBUG nova.compute.provider_tree [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 981.455398] env[68217]: DEBUG oslo_vmware.api [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961699, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.587689] env[68217]: DEBUG oslo_vmware.api [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961700, 'name': PowerOnVM_Task, 'duration_secs': 0.488417} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.587974] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 981.588202] env[68217]: INFO nova.compute.manager [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Took 7.54 seconds to spawn the instance on the hypervisor. 
[ 981.588394] env[68217]: DEBUG nova.compute.manager [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 981.589234] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36673cb1-e77d-4b0a-9086-2df59371058b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.741125] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 981.931158] env[68217]: DEBUG oslo_concurrency.lockutils [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.288s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.931974] env[68217]: DEBUG nova.compute.manager [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 981.937657] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.188s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.942027] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 981.944250] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.377s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 981.944466] env[68217]: DEBUG nova.objects.instance [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lazy-loading 'pci_requests' on Instance uuid d14026b1-84dd-430e-be94-94dcb1f47473 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.956481] env[68217]: DEBUG oslo_vmware.api [None 
req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961699, 'name': CreateSnapshot_Task, 'duration_secs': 0.934725} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.957401] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 981.958269] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d932418-769a-47b3-b407-7bceb6aed1e2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.977022] env[68217]: INFO nova.scheduler.client.report [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleted allocations for instance 35817c87-0c55-49bd-917a-59bd39de663c [ 982.112539] env[68217]: INFO nova.compute.manager [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Took 21.20 seconds to build instance. [ 982.448898] env[68217]: DEBUG nova.compute.utils [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 982.450799] env[68217]: DEBUG nova.compute.manager [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 982.450890] env[68217]: DEBUG nova.network.neutron [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 982.454690] env[68217]: DEBUG nova.objects.instance [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lazy-loading 'numa_topology' on Instance uuid d14026b1-84dd-430e-be94-94dcb1f47473 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 982.488130] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 982.490978] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-097808bd-4450-4d06-b822-f44d55806463 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.500093] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e392e10d-22a0-4b7e-9e44-82880df974f8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "35817c87-0c55-49bd-917a-59bd39de663c" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 14.357s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.509192] env[68217]: DEBUG oslo_vmware.api [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 982.509192] env[68217]: value = "task-2961701" [ 982.509192] env[68217]: _type = "Task" [ 982.509192] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.521357] env[68217]: DEBUG oslo_vmware.api [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961701, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.537492] env[68217]: DEBUG nova.policy [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '752b6d9ab4d64b1390ca8388fb28db15', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ad331ad8f44348f6b4c0a6c56977022d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 982.562418] env[68217]: DEBUG nova.compute.manager [req-c3f5fec2-65bc-4227-838f-43ad42aaf493 req-d83caec2-da54-4987-8d50-1a9ec66051aa service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Received event network-changed-05c67562-5b0b-421a-a707-1d10d90f4a71 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 982.562602] env[68217]: DEBUG nova.compute.manager [req-c3f5fec2-65bc-4227-838f-43ad42aaf493 req-d83caec2-da54-4987-8d50-1a9ec66051aa service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Refreshing instance network info cache due to event network-changed-05c67562-5b0b-421a-a707-1d10d90f4a71. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 982.562816] env[68217]: DEBUG oslo_concurrency.lockutils [req-c3f5fec2-65bc-4227-838f-43ad42aaf493 req-d83caec2-da54-4987-8d50-1a9ec66051aa service nova] Acquiring lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.562959] env[68217]: DEBUG oslo_concurrency.lockutils [req-c3f5fec2-65bc-4227-838f-43ad42aaf493 req-d83caec2-da54-4987-8d50-1a9ec66051aa service nova] Acquired lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 982.563420] env[68217]: DEBUG nova.network.neutron [req-c3f5fec2-65bc-4227-838f-43ad42aaf493 req-d83caec2-da54-4987-8d50-1a9ec66051aa service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Refreshing network info cache for port 05c67562-5b0b-421a-a707-1d10d90f4a71 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 982.614735] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cb2e5c15-ac1e-45a3-9ed4-708b3b33c52b tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 22.711s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 982.956888] env[68217]: DEBUG nova.compute.manager [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 982.960115] env[68217]: INFO nova.compute.claims [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 983.020265] env[68217]: DEBUG oslo_vmware.api [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961701, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.022580] env[68217]: DEBUG nova.network.neutron [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Successfully created port: 2b488304-74ad-4bc6-9590-24a5d0f5d001 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 983.294297] env[68217]: DEBUG oslo_concurrency.lockutils [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "df4c3a34-2dea-4f82-9ea6-7a9eb1c03179" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 983.294597] env[68217]: DEBUG oslo_concurrency.lockutils [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "df4c3a34-2dea-4f82-9ea6-7a9eb1c03179" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 983.520902] env[68217]: DEBUG oslo_vmware.api [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961701, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.540086] env[68217]: DEBUG nova.network.neutron [req-c3f5fec2-65bc-4227-838f-43ad42aaf493 req-d83caec2-da54-4987-8d50-1a9ec66051aa service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Updated VIF entry in instance network info cache for port 05c67562-5b0b-421a-a707-1d10d90f4a71. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 983.540524] env[68217]: DEBUG nova.network.neutron [req-c3f5fec2-65bc-4227-838f-43ad42aaf493 req-d83caec2-da54-4987-8d50-1a9ec66051aa service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Updating instance_info_cache with network_info: [{"id": "05c67562-5b0b-421a-a707-1d10d90f4a71", "address": "fa:16:3e:f2:62:b9", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05c67562-5b", "ovs_interfaceid": "05c67562-5b0b-421a-a707-1d10d90f4a71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.797522] env[68217]: DEBUG nova.compute.manager [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 983.971607] env[68217]: DEBUG nova.compute.manager [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 983.994049] env[68217]: DEBUG nova.virt.hardware [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 983.994301] env[68217]: DEBUG nova.virt.hardware [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 983.994522] env[68217]: DEBUG nova.virt.hardware [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 983.994632] env[68217]: DEBUG nova.virt.hardware [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 983.994776] env[68217]: DEBUG nova.virt.hardware [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 983.994931] env[68217]: DEBUG nova.virt.hardware [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 983.995261] env[68217]: DEBUG nova.virt.hardware [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 983.995506] env[68217]: DEBUG nova.virt.hardware [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 983.995741] env[68217]: DEBUG 
nova.virt.hardware [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 983.995919] env[68217]: DEBUG nova.virt.hardware [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 983.996217] env[68217]: DEBUG nova.virt.hardware [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 983.997127] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2068832f-4ab0-4580-853f-fb4a88887155 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.017630] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62eda2fd-6d0c-433b-913e-77fe056056f9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.042159] env[68217]: DEBUG oslo_vmware.api [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961701, 'name': CloneVM_Task, 'duration_secs': 1.403671} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.042861] env[68217]: DEBUG oslo_concurrency.lockutils [req-c3f5fec2-65bc-4227-838f-43ad42aaf493 req-d83caec2-da54-4987-8d50-1a9ec66051aa service nova] Releasing lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 984.043945] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Created linked-clone VM from snapshot [ 984.045061] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78f98651-1803-4f91-a32e-e10ef2574980 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.055913] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Uploading image e2ae22af-290f-4e09-86ea-4b5f1f275371 {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 984.089337] env[68217]: DEBUG oslo_vmware.rw_handles [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 984.089337] env[68217]: value = "vm-594347" [ 984.089337] env[68217]: _type = "VirtualMachine" [ 984.089337] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 984.089617] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-8e959ff5-9410-445d-8229-82a31574206d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.096213] env[68217]: DEBUG oslo_vmware.rw_handles [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lease: (returnval){ [ 984.096213] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52be310d-9e75-cfbd-bf78-3c047c214f58" [ 984.096213] env[68217]: _type = "HttpNfcLease" [ 984.096213] env[68217]: } obtained for exporting VM: (result){ [ 984.096213] env[68217]: value = "vm-594347" [ 984.096213] env[68217]: _type = "VirtualMachine" [ 984.096213] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 984.096656] env[68217]: DEBUG oslo_vmware.api [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the lease: (returnval){ [ 984.096656] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52be310d-9e75-cfbd-bf78-3c047c214f58" [ 984.096656] env[68217]: _type = "HttpNfcLease" [ 984.096656] env[68217]: } to be ready. 
{{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 984.104424] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 984.104424] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52be310d-9e75-cfbd-bf78-3c047c214f58" [ 984.104424] env[68217]: _type = "HttpNfcLease" [ 984.104424] env[68217]: } is initializing. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 984.266673] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3874502-70f1-4a76-9610-f27ea63eb217 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.275391] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d79b3275-9108-4f9a-bf2d-e786f071b43a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.316747] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c62f460-28b4-4022-91c0-a586cabf5be2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.327038] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43dbfda5-9b4d-4af4-9665-9ad1c5c46c43 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.331709] env[68217]: DEBUG oslo_concurrency.lockutils [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 984.342197] env[68217]: DEBUG nova.compute.provider_tree [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 984.608475] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 984.608475] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52be310d-9e75-cfbd-bf78-3c047c214f58" [ 984.608475] env[68217]: _type = "HttpNfcLease" [ 984.608475] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 984.608797] env[68217]: DEBUG oslo_vmware.rw_handles [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 984.608797] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52be310d-9e75-cfbd-bf78-3c047c214f58" [ 984.608797] env[68217]: _type = "HttpNfcLease" [ 984.608797] env[68217]: }. 
{{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 984.609545] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d346bd3-4c02-430a-ae22-63b3526a6ef1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.621166] env[68217]: DEBUG oslo_vmware.rw_handles [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526c96bc-22ed-a017-b774-13e61d8381c8/disk-0.vmdk from lease info. {{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 984.621401] env[68217]: DEBUG oslo_vmware.rw_handles [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526c96bc-22ed-a017-b774-13e61d8381c8/disk-0.vmdk for reading. {{(pid=68217) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 984.727868] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-17c6ac2b-3ff0-4c22-8068-91623b5c8fa6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.822630] env[68217]: DEBUG nova.compute.manager [req-a23c2f95-b940-4cb9-a055-2f00b80f1847 req-ffa26c25-f7a2-43f7-9f15-984b071d9ed8 service nova] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Received event network-vif-plugged-2b488304-74ad-4bc6-9590-24a5d0f5d001 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 984.822630] env[68217]: DEBUG oslo_concurrency.lockutils [req-a23c2f95-b940-4cb9-a055-2f00b80f1847 req-ffa26c25-f7a2-43f7-9f15-984b071d9ed8 service nova] Acquiring lock "92f99a45-13a3-48d9-8dbc-4065cc8ee9dc-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 984.822630] env[68217]: DEBUG oslo_concurrency.lockutils [req-a23c2f95-b940-4cb9-a055-2f00b80f1847 req-ffa26c25-f7a2-43f7-9f15-984b071d9ed8 service nova] Lock "92f99a45-13a3-48d9-8dbc-4065cc8ee9dc-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 984.822630] env[68217]: DEBUG oslo_concurrency.lockutils [req-a23c2f95-b940-4cb9-a055-2f00b80f1847 req-ffa26c25-f7a2-43f7-9f15-984b071d9ed8 service nova] Lock "92f99a45-13a3-48d9-8dbc-4065cc8ee9dc-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 984.822630] env[68217]: DEBUG nova.compute.manager [req-a23c2f95-b940-4cb9-a055-2f00b80f1847 req-ffa26c25-f7a2-43f7-9f15-984b071d9ed8 service nova] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] No waiting events found dispatching network-vif-plugged-2b488304-74ad-4bc6-9590-24a5d0f5d001 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 984.822630] env[68217]: WARNING nova.compute.manager 
[req-a23c2f95-b940-4cb9-a055-2f00b80f1847 req-ffa26c25-f7a2-43f7-9f15-984b071d9ed8 service nova] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Received unexpected event network-vif-plugged-2b488304-74ad-4bc6-9590-24a5d0f5d001 for instance with vm_state building and task_state spawning. [ 984.846507] env[68217]: DEBUG nova.scheduler.client.report [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 984.986072] env[68217]: DEBUG nova.network.neutron [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Successfully updated port: 2b488304-74ad-4bc6-9590-24a5d0f5d001 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 985.353766] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.408s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 985.355315] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.308s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 985.356490] env[68217]: INFO nova.compute.claims [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 985.395345] env[68217]: INFO nova.network.neutron [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Updating port c4ffafa7-b375-4f41-90e8-0db42f248139 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 985.493275] env[68217]: DEBUG oslo_concurrency.lockutils [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "refresh_cache-92f99a45-13a3-48d9-8dbc-4065cc8ee9dc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.493275] env[68217]: DEBUG oslo_concurrency.lockutils [None req-77b7f1b4-8088-4805-9682-c9821039ed2c 
tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "refresh_cache-92f99a45-13a3-48d9-8dbc-4065cc8ee9dc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 985.493275] env[68217]: DEBUG nova.network.neutron [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 986.049878] env[68217]: DEBUG nova.network.neutron [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 986.178399] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Acquiring lock "33802025-7f72-4ad9-80fe-b15196b1a577" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 986.178908] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Lock "33802025-7f72-4ad9-80fe-b15196b1a577" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 986.247669] env[68217]: DEBUG nova.network.neutron [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Updating instance_info_cache with network_info: [{"id": "2b488304-74ad-4bc6-9590-24a5d0f5d001", "address": "fa:16:3e:81:8e:4e", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b488304-74", "ovs_interfaceid": "2b488304-74ad-4bc6-9590-24a5d0f5d001", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 986.628959] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b728185-6e8a-41a1-a804-17b61179e756 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.636953] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5004b33a-3f97-4e31-b6b0-1be1b055cddf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.669296] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43a598c9-e388-4579-a5cc-34c64a31728a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.676976] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12fd666f-b29d-43be-9405-4a89a2ed9eb2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.682450] env[68217]: DEBUG nova.compute.manager [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 986.694969] env[68217]: DEBUG nova.compute.provider_tree [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 986.750052] env[68217]: DEBUG oslo_concurrency.lockutils [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "refresh_cache-92f99a45-13a3-48d9-8dbc-4065cc8ee9dc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 986.750372] env[68217]: DEBUG nova.compute.manager [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Instance network_info: |[{"id": "2b488304-74ad-4bc6-9590-24a5d0f5d001", "address": "fa:16:3e:81:8e:4e", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b488304-74", "ovs_interfaceid": 
"2b488304-74ad-4bc6-9590-24a5d0f5d001", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 986.750765] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:81:8e:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b1f3e6c3-5584-4852-9017-476ab8ac4946', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '2b488304-74ad-4bc6-9590-24a5d0f5d001', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 986.759802] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 986.761670] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 986.761951] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0f9923a-f122-431c-b8c9-23442f2bc9a6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.784152] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 986.784152] env[68217]: value = "task-2961703" [ 986.784152] env[68217]: _type = "Task" [ 986.784152] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.792263] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961703, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.854718] env[68217]: DEBUG nova.compute.manager [req-c1931d98-58d3-42c6-a483-12875c65a87c req-f677c35a-7abc-4b64-970e-41c82e3ad489 service nova] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Received event network-changed-2b488304-74ad-4bc6-9590-24a5d0f5d001 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 986.854902] env[68217]: DEBUG nova.compute.manager [req-c1931d98-58d3-42c6-a483-12875c65a87c req-f677c35a-7abc-4b64-970e-41c82e3ad489 service nova] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Refreshing instance network info cache due to event network-changed-2b488304-74ad-4bc6-9590-24a5d0f5d001. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 986.855142] env[68217]: DEBUG oslo_concurrency.lockutils [req-c1931d98-58d3-42c6-a483-12875c65a87c req-f677c35a-7abc-4b64-970e-41c82e3ad489 service nova] Acquiring lock "refresh_cache-92f99a45-13a3-48d9-8dbc-4065cc8ee9dc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.855284] env[68217]: DEBUG oslo_concurrency.lockutils [req-c1931d98-58d3-42c6-a483-12875c65a87c req-f677c35a-7abc-4b64-970e-41c82e3ad489 service nova] Acquired lock "refresh_cache-92f99a45-13a3-48d9-8dbc-4065cc8ee9dc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 986.855447] env[68217]: DEBUG nova.network.neutron [req-c1931d98-58d3-42c6-a483-12875c65a87c req-f677c35a-7abc-4b64-970e-41c82e3ad489 service nova] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Refreshing network info cache for port 2b488304-74ad-4bc6-9590-24a5d0f5d001 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 986.980917] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.981173] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquired lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 986.981428] env[68217]: DEBUG nova.network.neutron [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 987.200311] env[68217]: DEBUG nova.scheduler.client.report [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 987.206050] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 987.294634] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961703, 'name': CreateVM_Task, 
'duration_secs': 0.357784} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.295531] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 987.295695] env[68217]: DEBUG oslo_concurrency.lockutils [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.295865] env[68217]: DEBUG oslo_concurrency.lockutils [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 987.296849] env[68217]: DEBUG oslo_concurrency.lockutils [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 987.296849] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bdb5181-b589-4c0d-ae47-a8c53c3b397e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.301160] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 987.301160] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5211899b-7368-6833-fcbf-b9f14629442e" [ 987.301160] env[68217]: _type = "Task" [ 987.301160] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.309078] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5211899b-7368-6833-fcbf-b9f14629442e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.640227] env[68217]: DEBUG nova.network.neutron [req-c1931d98-58d3-42c6-a483-12875c65a87c req-f677c35a-7abc-4b64-970e-41c82e3ad489 service nova] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Updated VIF entry in instance network info cache for port 2b488304-74ad-4bc6-9590-24a5d0f5d001. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 987.640912] env[68217]: DEBUG nova.network.neutron [req-c1931d98-58d3-42c6-a483-12875c65a87c req-f677c35a-7abc-4b64-970e-41c82e3ad489 service nova] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Updating instance_info_cache with network_info: [{"id": "2b488304-74ad-4bc6-9590-24a5d0f5d001", "address": "fa:16:3e:81:8e:4e", "network": {"id": "cd6d19ff-5e39-4a30-9a9c-cef5b4cb3af1", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-270531081-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ad331ad8f44348f6b4c0a6c56977022d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b1f3e6c3-5584-4852-9017-476ab8ac4946", "external-id": "nsx-vlan-transportzone-304", "segmentation_id": 304, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap2b488304-74", "ovs_interfaceid": "2b488304-74ad-4bc6-9590-24a5d0f5d001", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.705971] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.351s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 987.705971] env[68217]: DEBUG nova.compute.manager [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Start building networks asynchronously for instance. 
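The "compute_resources" lock lines in these records (waited 12.308s, held 2.351s and 3.408s) come from serializing ResourceTracker.instance_claim on one named semaphore, so only a single claim at a time can mutate the host's view of free resources. A toy sketch of that serialization pattern with oslo.concurrency's synchronized decorator; the vCPU counter is a placeholder, not Nova's actual accounting.

```python
from oslo_concurrency import lockutils

_free_vcpus = 48  # toy stand-in for the resource tracker's real bookkeeping

@lockutils.synchronized("compute_resources")
def instance_claim(requested_vcpus):
    """Claim vCPUs under the same named lock seen in the log.

    Holding "compute_resources" for the whole claim is what produces the
    'waited N.NNNs' / 'held N.NNNs' records: concurrent claims queue up
    instead of racing on the free-resource counters.
    """
    global _free_vcpus
    if requested_vcpus > _free_vcpus:
        raise RuntimeError("not enough vCPUs left to satisfy the claim")
    _free_vcpus -= requested_vcpus
    return _free_vcpus
```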
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 987.710724] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.217s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 987.713118] env[68217]: INFO nova.compute.claims [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 987.816178] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5211899b-7368-6833-fcbf-b9f14629442e, 'name': SearchDatastore_Task, 'duration_secs': 0.013088} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.816501] env[68217]: DEBUG oslo_concurrency.lockutils [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 987.816744] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 987.816978] env[68217]: DEBUG oslo_concurrency.lockutils [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 987.817138] env[68217]: DEBUG oslo_concurrency.lockutils [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 987.817326] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 987.817591] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b421c1b8-6407-4e7d-903e-52e54c843a6e {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.826532] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 987.826708] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 987.827465] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-470528f6-0974-4542-8c82-cd89720d5d88 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.833263] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 987.833263] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527ff8ff-59e2-9560-f56f-5880f83d5b59" [ 987.833263] env[68217]: _type = "Task" [ 987.833263] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.841850] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527ff8ff-59e2-9560-f56f-5880f83d5b59, 'name': SearchDatastore_Task} progress is 0%. 
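The image-cache handling above keeps building the same two datastore paths: the shared cache entry ("[datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk", where the cache folder name is whatever this deployment configured) and, for the CopyVirtualDisk_Task further down, the per-instance copy ("[datastore1] <instance-uuid>/<instance-uuid>.vmdk"). A tiny helper reproducing that layout for reference; the function names are illustrative, not Nova APIs.

```python
def cached_image_path(datastore, image_id,
                      cache_folder="devstack-image-cache_base"):
    # Shared, deduplicated copy of the Glance image on the datastore.
    return "[%s] %s/%s/%s.vmdk" % (datastore, cache_folder, image_id, image_id)

def instance_disk_path(datastore, instance_uuid):
    # Target that CopyVirtualDisk_Task clones the cached image into.
    return "[%s] %s/%s.vmdk" % (datastore, instance_uuid, instance_uuid)

print(cached_image_path("datastore1", "575ba628-84b6-4b0c-98ba-305166627d10"))
print(instance_disk_path("datastore1", "92f99a45-13a3-48d9-8dbc-4065cc8ee9dc"))
```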
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.843570] env[68217]: DEBUG nova.network.neutron [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Updating instance_info_cache with network_info: [{"id": "c4ffafa7-b375-4f41-90e8-0db42f248139", "address": "fa:16:3e:50:21:dc", "network": {"id": "b560d1ba-8945-443a-9586-083067363663", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-25035929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0522eaa6ebc48a28651f6b3bf1434f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4ffafa7-b3", "ovs_interfaceid": "c4ffafa7-b375-4f41-90e8-0db42f248139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 988.147217] env[68217]: DEBUG oslo_concurrency.lockutils [req-c1931d98-58d3-42c6-a483-12875c65a87c req-f677c35a-7abc-4b64-970e-41c82e3ad489 service nova] Releasing lock "refresh_cache-92f99a45-13a3-48d9-8dbc-4065cc8ee9dc" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 988.217984] env[68217]: DEBUG nova.compute.utils [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 988.221883] env[68217]: DEBUG nova.compute.manager [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Allocating IP information in the background. 
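The Acquiring / Acquired / Releasing lock "refresh_cache-<instance-uuid>" records scattered through this trace are oslo.concurrency named locks guarding the per-instance network info cache. A minimal sketch of the same pattern; the refresh function body is a placeholder, not the real Neutron round-trip.

```python
from oslo_concurrency import lockutils

def refresh_network_cache(instance_uuid):
    # Placeholder for the Neutron lookups and cache write the log shows.
    return {"instance": instance_uuid, "ports": []}

def locked_refresh(instance_uuid):
    # lockutils.lock() emits the acquire/release debug lines seen above and
    # keeps two concurrent refreshes of the same instance from interleaving.
    with lockutils.lock("refresh_cache-%s" % instance_uuid):
        return refresh_network_cache(instance_uuid)

if __name__ == "__main__":
    print(locked_refresh("92f99a45-13a3-48d9-8dbc-4065cc8ee9dc"))
```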
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 988.221883] env[68217]: DEBUG nova.network.neutron [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 988.297696] env[68217]: DEBUG nova.policy [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '03904e82faab40849ad7b7818bf2a121', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1424003d74424a9e84d15879f2e634e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 988.344717] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527ff8ff-59e2-9560-f56f-5880f83d5b59, 'name': SearchDatastore_Task, 'duration_secs': 0.011669} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.345747] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfb8c0c7-cb24-44d1-939b-b952354826aa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.348572] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Releasing lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 988.355184] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 988.355184] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5213ad16-1cfd-24ff-0f21-87c61968a79c" [ 988.355184] env[68217]: _type = "Task" [ 988.355184] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.364391] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5213ad16-1cfd-24ff-0f21-87c61968a79c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.375702] env[68217]: DEBUG nova.virt.hardware [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='9e13d894cd72faee4af7041a9b268f57',container_format='bare',created_at=2025-03-12T08:21:17Z,direct_url=,disk_format='vmdk',id=16498e3d-bc2f-4eea-bb32-2fe6e2472b81,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-638632277-shelved',owner='c0522eaa6ebc48a28651f6b3bf1434f3',properties=ImageMetaProps,protected=,size=31670272,status='active',tags=,updated_at=2025-03-12T08:21:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 988.375980] env[68217]: DEBUG nova.virt.hardware [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 988.376214] env[68217]: DEBUG nova.virt.hardware [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 988.376444] env[68217]: DEBUG nova.virt.hardware [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 988.376628] env[68217]: DEBUG nova.virt.hardware [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 988.377439] env[68217]: DEBUG nova.virt.hardware [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 988.377439] env[68217]: DEBUG nova.virt.hardware [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 988.377439] env[68217]: DEBUG nova.virt.hardware [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 988.377439] env[68217]: DEBUG nova.virt.hardware [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 988.377753] env[68217]: DEBUG nova.virt.hardware [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 988.377877] env[68217]: DEBUG nova.virt.hardware [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 988.378781] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e6b678f-8969-42d0-bbf3-50379e2f0432 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.386778] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1374108f-5d24-4040-b5aa-a5d2beb01792 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.402320] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:21:dc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c4ffafa7-b375-4f41-90e8-0db42f248139', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 988.410178] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 988.410631] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 988.410924] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1c658446-ffca-4dfc-b38f-72b0c6da2e5e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.431459] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 988.431459] env[68217]: value = "task-2961704" [ 988.431459] env[68217]: _type = "Task" [ 988.431459] env[68217]: } to complete. 
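Every "Waiting for the task ... to complete" / "progress is N%" / "completed successfully ... duration_secs" sequence in these records is the same wait_for_task / _poll_task loop in oslo_vmware/api.py. A minimal sketch of the pattern; `get_task_info` is a hypothetical stand-in for the property read against the Task managed object, and the dictionary keys are assumptions chosen to mirror the fields the log reports.

```python
import time

def wait_for_task(get_task_info, poll_interval=0.5):
    """Poll a vCenter-style task until it finishes and return its result.

    get_task_info is a hypothetical callable returning a mapping with a
    'state' key ('queued'/'running'/'success'/'error') plus optional
    'progress', 'result' and 'error' entries.
    """
    started = time.monotonic()
    while True:
        info = get_task_info()
        if info["state"] == "success":
            info["duration_secs"] = time.monotonic() - started  # as logged above
            return info.get("result")
        if info["state"] == "error":
            raise RuntimeError("task failed: %s" % info.get("error"))
        time.sleep(poll_interval)
```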
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.439979] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961704, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.610846] env[68217]: DEBUG nova.network.neutron [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Successfully created port: 46e4edb9-72c4-4a7d-af91-4b553d829391 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 988.724990] env[68217]: DEBUG nova.compute.manager [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 988.866740] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5213ad16-1cfd-24ff-0f21-87c61968a79c, 'name': SearchDatastore_Task, 'duration_secs': 0.01217} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.871110] env[68217]: DEBUG oslo_concurrency.lockutils [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 988.871491] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc/92f99a45-13a3-48d9-8dbc-4065cc8ee9dc.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 988.873089] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-39bc724c-0079-40c2-a4b0-d171c3c76932 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.880270] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 988.880270] env[68217]: value = "task-2961705" [ 988.880270] env[68217]: _type = "Task" [ 988.880270] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.894591] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961705, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.917479] env[68217]: DEBUG nova.compute.manager [req-2af9e9f1-f114-4c81-b2ea-2108ab99e721 req-c533284f-4611-4138-b818-a5fff1b372a0 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Received event network-vif-plugged-c4ffafa7-b375-4f41-90e8-0db42f248139 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 988.917717] env[68217]: DEBUG oslo_concurrency.lockutils [req-2af9e9f1-f114-4c81-b2ea-2108ab99e721 req-c533284f-4611-4138-b818-a5fff1b372a0 service nova] Acquiring lock "d14026b1-84dd-430e-be94-94dcb1f47473-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 988.917926] env[68217]: DEBUG oslo_concurrency.lockutils [req-2af9e9f1-f114-4c81-b2ea-2108ab99e721 req-c533284f-4611-4138-b818-a5fff1b372a0 service nova] Lock "d14026b1-84dd-430e-be94-94dcb1f47473-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 988.918234] env[68217]: DEBUG oslo_concurrency.lockutils [req-2af9e9f1-f114-4c81-b2ea-2108ab99e721 req-c533284f-4611-4138-b818-a5fff1b372a0 service nova] Lock "d14026b1-84dd-430e-be94-94dcb1f47473-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 988.918448] env[68217]: DEBUG nova.compute.manager [req-2af9e9f1-f114-4c81-b2ea-2108ab99e721 req-c533284f-4611-4138-b818-a5fff1b372a0 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] No waiting events found dispatching network-vif-plugged-c4ffafa7-b375-4f41-90e8-0db42f248139 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 988.918614] env[68217]: WARNING nova.compute.manager [req-2af9e9f1-f114-4c81-b2ea-2108ab99e721 req-c533284f-4611-4138-b818-a5fff1b372a0 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Received unexpected event network-vif-plugged-c4ffafa7-b375-4f41-90e8-0db42f248139 for instance with vm_state shelved_offloaded and task_state spawning. [ 988.918803] env[68217]: DEBUG nova.compute.manager [req-2af9e9f1-f114-4c81-b2ea-2108ab99e721 req-c533284f-4611-4138-b818-a5fff1b372a0 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Received event network-changed-c4ffafa7-b375-4f41-90e8-0db42f248139 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 988.918931] env[68217]: DEBUG nova.compute.manager [req-2af9e9f1-f114-4c81-b2ea-2108ab99e721 req-c533284f-4611-4138-b818-a5fff1b372a0 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Refreshing instance network info cache due to event network-changed-c4ffafa7-b375-4f41-90e8-0db42f248139. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 988.919145] env[68217]: DEBUG oslo_concurrency.lockutils [req-2af9e9f1-f114-4c81-b2ea-2108ab99e721 req-c533284f-4611-4138-b818-a5fff1b372a0 service nova] Acquiring lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.919283] env[68217]: DEBUG oslo_concurrency.lockutils [req-2af9e9f1-f114-4c81-b2ea-2108ab99e721 req-c533284f-4611-4138-b818-a5fff1b372a0 service nova] Acquired lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.919450] env[68217]: DEBUG nova.network.neutron [req-2af9e9f1-f114-4c81-b2ea-2108ab99e721 req-c533284f-4611-4138-b818-a5fff1b372a0 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Refreshing network info cache for port c4ffafa7-b375-4f41-90e8-0db42f248139 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 988.946735] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961704, 'name': CreateVM_Task, 'duration_secs': 0.483215} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.946974] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 988.947772] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/16498e3d-bc2f-4eea-bb32-2fe6e2472b81" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 988.947950] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquired lock "[datastore1] devstack-image-cache_base/16498e3d-bc2f-4eea-bb32-2fe6e2472b81" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 988.948408] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/16498e3d-bc2f-4eea-bb32-2fe6e2472b81" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 988.948739] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a376625-ccc7-4fd2-9ce4-8ddbefe2b32a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.954412] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 988.954412] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5210b5e5-3603-a99c-d7a9-3981303a2714" [ 988.954412] env[68217]: _type = "Task" [ 988.954412] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.967201] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5210b5e5-3603-a99c-d7a9-3981303a2714, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.055849] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23270109-0bb5-4648-9353-4cd5433de8ed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.063751] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-074b8ba4-ef97-4172-bc47-5e219169c5f9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.099058] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48c8d344-ab2f-4a2f-8a08-f9948d396d6c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.109548] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-537c2578-d6de-495d-af60-fd29af518615 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.123960] env[68217]: DEBUG nova.compute.provider_tree [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 989.396838] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961705, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.471621] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Releasing lock "[datastore1] devstack-image-cache_base/16498e3d-bc2f-4eea-bb32-2fe6e2472b81" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 989.472134] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Processing image 16498e3d-bc2f-4eea-bb32-2fe6e2472b81 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 989.472512] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/16498e3d-bc2f-4eea-bb32-2fe6e2472b81/16498e3d-bc2f-4eea-bb32-2fe6e2472b81.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.472792] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquired lock "[datastore1] devstack-image-cache_base/16498e3d-bc2f-4eea-bb32-2fe6e2472b81/16498e3d-bc2f-4eea-bb32-2fe6e2472b81.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 989.473099] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 989.473490] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-87eb84a6-e0ab-4881-aa1c-5858cc052fc0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.487016] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 989.487580] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 989.489041] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c562a3ce-f7b9-4ea5-8580-630c5625c069 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.499242] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 989.499242] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5271a563-fd15-7bc1-3cc2-3acd249738bc" [ 989.499242] env[68217]: _type = "Task" [ 989.499242] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.510583] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Preparing fetch location {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 989.511865] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Fetch image to [datastore1] OSTACK_IMG_e69e7992-692d-4cd7-91ac-048433d24401/OSTACK_IMG_e69e7992-692d-4cd7-91ac-048433d24401.vmdk {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 989.512237] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Downloading stream optimized image 16498e3d-bc2f-4eea-bb32-2fe6e2472b81 to [datastore1] OSTACK_IMG_e69e7992-692d-4cd7-91ac-048433d24401/OSTACK_IMG_e69e7992-692d-4cd7-91ac-048433d24401.vmdk on the data store datastore1 as vApp {{(pid=68217) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 989.512592] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Downloading image file data 16498e3d-bc2f-4eea-bb32-2fe6e2472b81 to the ESX as VM named 'OSTACK_IMG_e69e7992-692d-4cd7-91ac-048433d24401' {{(pid=68217) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 989.586766] env[68217]: DEBUG oslo_vmware.rw_handles [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 989.586766] env[68217]: value = "resgroup-9" [ 989.586766] env[68217]: _type = "ResourcePool" [ 989.586766] env[68217]: }. 
{{(pid=68217) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 989.587404] env[68217]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-b775101f-8de0-4bc8-9289-62fed8534757 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.608639] env[68217]: DEBUG oslo_vmware.rw_handles [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lease: (returnval){ [ 989.608639] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5242979b-8a98-6c68-bc76-7bec402810e9" [ 989.608639] env[68217]: _type = "HttpNfcLease" [ 989.608639] env[68217]: } obtained for vApp import into resource pool (val){ [ 989.608639] env[68217]: value = "resgroup-9" [ 989.608639] env[68217]: _type = "ResourcePool" [ 989.608639] env[68217]: }. {{(pid=68217) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 989.608929] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the lease: (returnval){ [ 989.608929] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5242979b-8a98-6c68-bc76-7bec402810e9" [ 989.608929] env[68217]: _type = "HttpNfcLease" [ 989.608929] env[68217]: } to be ready. {{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 989.617978] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 989.617978] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5242979b-8a98-6c68-bc76-7bec402810e9" [ 989.617978] env[68217]: _type = "HttpNfcLease" [ 989.617978] env[68217]: } is initializing. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 989.627038] env[68217]: DEBUG nova.scheduler.client.report [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 989.711672] env[68217]: DEBUG nova.network.neutron [req-2af9e9f1-f114-4c81-b2ea-2108ab99e721 req-c533284f-4611-4138-b818-a5fff1b372a0 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Updated VIF entry in instance network info cache for port c4ffafa7-b375-4f41-90e8-0db42f248139. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 989.712059] env[68217]: DEBUG nova.network.neutron [req-2af9e9f1-f114-4c81-b2ea-2108ab99e721 req-c533284f-4611-4138-b818-a5fff1b372a0 service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Updating instance_info_cache with network_info: [{"id": "c4ffafa7-b375-4f41-90e8-0db42f248139", "address": "fa:16:3e:50:21:dc", "network": {"id": "b560d1ba-8945-443a-9586-083067363663", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-25035929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.150", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0522eaa6ebc48a28651f6b3bf1434f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc4ffafa7-b3", "ovs_interfaceid": "c4ffafa7-b375-4f41-90e8-0db42f248139", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 989.738079] env[68217]: DEBUG nova.compute.manager [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 989.893056] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961705, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.539635} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.893342] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc/92f99a45-13a3-48d9-8dbc-4065cc8ee9dc.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 989.893596] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 989.893855] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-22da1833-2344-4d1a-b226-8f1f0931ad73 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.900850] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 989.900850] env[68217]: value = "task-2961707" [ 989.900850] env[68217]: _type = "Task" [ 989.900850] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.909131] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961707, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.118440] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 990.118440] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5242979b-8a98-6c68-bc76-7bec402810e9" [ 990.118440] env[68217]: _type = "HttpNfcLease" [ 990.118440] env[68217]: } is initializing. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 990.135751] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.425s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.136399] env[68217]: DEBUG nova.compute.manager [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 990.139181] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.546s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.139635] env[68217]: DEBUG nova.objects.instance [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lazy-loading 'resources' on Instance uuid aa4b9cc8-d0dc-4a0b-9eec-dceace695df9 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 990.215206] env[68217]: DEBUG oslo_concurrency.lockutils [req-2af9e9f1-f114-4c81-b2ea-2108ab99e721 req-c533284f-4611-4138-b818-a5fff1b372a0 service nova] Releasing lock "refresh_cache-d14026b1-84dd-430e-be94-94dcb1f47473" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 990.411823] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961707, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07621} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.412214] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 990.412915] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-754d4d7c-cff4-4420-9620-ad1bea4150d4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.435472] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc/92f99a45-13a3-48d9-8dbc-4065cc8ee9dc.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 990.436171] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f7aca41-3902-4af7-b0c3-d02b2fbcfba8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.455946] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 990.455946] env[68217]: value = "task-2961708" [ 990.455946] env[68217]: _type = "Task" [ 990.455946] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.464133] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961708, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.619471] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 990.619471] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5242979b-8a98-6c68-bc76-7bec402810e9" [ 990.619471] env[68217]: _type = "HttpNfcLease" [ 990.619471] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 990.619783] env[68217]: DEBUG oslo_vmware.rw_handles [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 990.619783] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5242979b-8a98-6c68-bc76-7bec402810e9" [ 990.619783] env[68217]: _type = "HttpNfcLease" [ 990.619783] env[68217]: }. {{(pid=68217) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 990.620547] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6df7b84-9599-43e8-83c8-5e4da4d1ecf4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.628250] env[68217]: DEBUG oslo_vmware.rw_handles [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52504b65-5f49-fd23-117f-c3e3c1df1266/disk-0.vmdk from lease info. {{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 990.628556] env[68217]: DEBUG oslo_vmware.rw_handles [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Creating HTTP connection to write to file with size = 31670272 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52504b65-5f49-fd23-117f-c3e3c1df1266/disk-0.vmdk. 
{{(pid=68217) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 990.684644] env[68217]: DEBUG nova.compute.utils [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 990.689393] env[68217]: DEBUG nova.network.neutron [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Successfully updated port: 46e4edb9-72c4-4a7d-af91-4b553d829391 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 990.691863] env[68217]: DEBUG nova.compute.manager [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 990.692941] env[68217]: DEBUG nova.network.neutron [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 990.697443] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-aad2c6a2-8e56-47dd-aa48-c2cd6fd9a718 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.735152] env[68217]: DEBUG nova.policy [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '03904e82faab40849ad7b7818bf2a121', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1424003d74424a9e84d15879f2e634e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 990.972168] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961708, 'name': ReconfigVM_Task, 'duration_secs': 0.28254} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.973082] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc/92f99a45-13a3-48d9-8dbc-4065cc8ee9dc.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 990.975359] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e7c18da5-d63c-4315-81e3-fbe7cfdcaecc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.983305] env[68217]: DEBUG nova.compute.manager [req-fb317de5-424d-44dc-bf0e-99cd3349d8a1 req-34db3e5c-ed59-4cde-a76e-a8cb21657d92 service nova] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Received event network-vif-plugged-46e4edb9-72c4-4a7d-af91-4b553d829391 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 990.983823] env[68217]: DEBUG oslo_concurrency.lockutils [req-fb317de5-424d-44dc-bf0e-99cd3349d8a1 req-34db3e5c-ed59-4cde-a76e-a8cb21657d92 service nova] Acquiring lock "e8ed78ff-94dd-42d3-8a4d-8e58dc788e55-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 990.984135] env[68217]: DEBUG oslo_concurrency.lockutils [req-fb317de5-424d-44dc-bf0e-99cd3349d8a1 req-34db3e5c-ed59-4cde-a76e-a8cb21657d92 service nova] Lock "e8ed78ff-94dd-42d3-8a4d-8e58dc788e55-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 990.984435] env[68217]: DEBUG oslo_concurrency.lockutils [req-fb317de5-424d-44dc-bf0e-99cd3349d8a1 req-34db3e5c-ed59-4cde-a76e-a8cb21657d92 service nova] Lock "e8ed78ff-94dd-42d3-8a4d-8e58dc788e55-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 990.984691] env[68217]: DEBUG nova.compute.manager [req-fb317de5-424d-44dc-bf0e-99cd3349d8a1 req-34db3e5c-ed59-4cde-a76e-a8cb21657d92 service nova] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] No waiting events found dispatching network-vif-plugged-46e4edb9-72c4-4a7d-af91-4b553d829391 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 990.984959] env[68217]: WARNING nova.compute.manager [req-fb317de5-424d-44dc-bf0e-99cd3349d8a1 req-34db3e5c-ed59-4cde-a76e-a8cb21657d92 service nova] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Received unexpected event network-vif-plugged-46e4edb9-72c4-4a7d-af91-4b553d829391 for instance with vm_state building and task_state spawning. 
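The records around this point repeat the two oslo.concurrency locking styles seen throughout this log: the lockutils.lock() context manager (the 'Acquiring lock' / 'Acquired lock' / 'Releasing lock' lines at lockutils.py:313/316/334, used for the refresh_cache-<uuid> and <uuid>-events locks) and the lockutils.synchronized() decorator, whose "inner" wrapper emits the 'acquired by ... :: waited Ns' / 'released ... :: held Ns' lines at lockutils.py:405/410/424 (used for the compute_resources lock). The following is a minimal, hypothetical sketch of those two patterns only; the function names and bodies are placeholders for illustration, not nova source.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Runs with the named lock held; the decorator's wrapper logs the
        # ":: waited Ns" / ":: held Ns" DEBUG lines visible in this log.
        pass  # placeholder body

    def refresh_network_cache(instance_uuid):
        # Mirrors the "refresh_cache-<uuid>" acquire/release pairs above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # refresh the instance network info cache while holding the lock

Both locks are per-process by default; the log's paired acquire/release entries with wait and hold durations come directly from these helpers rather than from nova-specific code.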
[ 990.985297] env[68217]: DEBUG nova.compute.manager [req-fb317de5-424d-44dc-bf0e-99cd3349d8a1 req-34db3e5c-ed59-4cde-a76e-a8cb21657d92 service nova] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Received event network-changed-46e4edb9-72c4-4a7d-af91-4b553d829391 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 990.985656] env[68217]: DEBUG nova.compute.manager [req-fb317de5-424d-44dc-bf0e-99cd3349d8a1 req-34db3e5c-ed59-4cde-a76e-a8cb21657d92 service nova] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Refreshing instance network info cache due to event network-changed-46e4edb9-72c4-4a7d-af91-4b553d829391. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 990.986027] env[68217]: DEBUG oslo_concurrency.lockutils [req-fb317de5-424d-44dc-bf0e-99cd3349d8a1 req-34db3e5c-ed59-4cde-a76e-a8cb21657d92 service nova] Acquiring lock "refresh_cache-e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.986272] env[68217]: DEBUG oslo_concurrency.lockutils [req-fb317de5-424d-44dc-bf0e-99cd3349d8a1 req-34db3e5c-ed59-4cde-a76e-a8cb21657d92 service nova] Acquired lock "refresh_cache-e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 990.986563] env[68217]: DEBUG nova.network.neutron [req-fb317de5-424d-44dc-bf0e-99cd3349d8a1 req-34db3e5c-ed59-4cde-a76e-a8cb21657d92 service nova] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Refreshing network info cache for port 46e4edb9-72c4-4a7d-af91-4b553d829391 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 990.996547] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 990.996547] env[68217]: value = "task-2961709" [ 990.996547] env[68217]: _type = "Task" [ 990.996547] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.010130] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961709, 'name': Rename_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.061080] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7a77a5c-a8b8-4cda-bb84-98d6603eac2f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.083663] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7145eea8-2377-4351-8aa5-7a0879947638 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.132152] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa9414d2-0d0c-4d21-9a3a-0868e4112aad {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.142103] env[68217]: DEBUG nova.network.neutron [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Successfully created port: 893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 991.148258] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e920dc01-6a70-4296-90c5-ff3e2ab75dc1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.164319] env[68217]: DEBUG nova.compute.provider_tree [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 991.190076] env[68217]: DEBUG nova.compute.manager [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 991.197684] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "refresh_cache-e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.389293] env[68217]: DEBUG nova.virt.hardware [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 991.389655] env[68217]: DEBUG nova.virt.hardware [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 991.390665] env[68217]: DEBUG nova.virt.hardware [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 991.390953] env[68217]: DEBUG nova.virt.hardware [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 991.391157] env[68217]: DEBUG nova.virt.hardware [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 991.391391] env[68217]: DEBUG nova.virt.hardware [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 991.391644] env[68217]: DEBUG nova.virt.hardware [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 991.391809] env[68217]: DEBUG nova.virt.hardware [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 991.391992] env[68217]: DEBUG nova.virt.hardware [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 991.392212] env[68217]: DEBUG nova.virt.hardware [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 991.392414] env[68217]: DEBUG nova.virt.hardware [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 991.393419] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4b79625-d41a-4bb4-9054-ca9f726b60e7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.414304] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96a42755-a005-4942-8f69-3d340f9c5327 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.508299] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961709, 'name': Rename_Task, 'duration_secs': 0.228758} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.508607] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 991.508878] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-03d25a92-fe4e-46e7-8d90-555cc372754c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.517186] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 991.517186] env[68217]: value = "task-2961710" [ 991.517186] env[68217]: _type = "Task" [ 991.517186] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.527659] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961710, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.538157] env[68217]: DEBUG nova.network.neutron [req-fb317de5-424d-44dc-bf0e-99cd3349d8a1 req-34db3e5c-ed59-4cde-a76e-a8cb21657d92 service nova] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 991.661970] env[68217]: DEBUG nova.network.neutron [req-fb317de5-424d-44dc-bf0e-99cd3349d8a1 req-34db3e5c-ed59-4cde-a76e-a8cb21657d92 service nova] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.670711] env[68217]: DEBUG nova.scheduler.client.report [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 991.905078] env[68217]: DEBUG oslo_vmware.rw_handles [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Completed reading data from the image iterator. {{(pid=68217) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 991.905311] env[68217]: DEBUG oslo_vmware.rw_handles [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52504b65-5f49-fd23-117f-c3e3c1df1266/disk-0.vmdk. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 991.906501] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb05050-1a5a-4e90-b2dc-a429784dd6a8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.914862] env[68217]: DEBUG oslo_vmware.rw_handles [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52504b65-5f49-fd23-117f-c3e3c1df1266/disk-0.vmdk is in state: ready. 
{{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 991.915089] env[68217]: DEBUG oslo_vmware.rw_handles [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52504b65-5f49-fd23-117f-c3e3c1df1266/disk-0.vmdk. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 991.915367] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-8dac0ce1-a072-42be-bb89-5861182b171b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.029032] env[68217]: DEBUG oslo_vmware.api [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961710, 'name': PowerOnVM_Task, 'duration_secs': 0.506288} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.029308] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 992.029672] env[68217]: INFO nova.compute.manager [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Took 8.06 seconds to spawn the instance on the hypervisor. [ 992.029885] env[68217]: DEBUG nova.compute.manager [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 992.031528] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b3e27bf-4ff7-45ad-a7f5-c0a3bc3dbe91 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.132952] env[68217]: DEBUG oslo_vmware.rw_handles [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52504b65-5f49-fd23-117f-c3e3c1df1266/disk-0.vmdk. 
{{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 992.133262] env[68217]: INFO nova.virt.vmwareapi.images [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Downloaded image file data 16498e3d-bc2f-4eea-bb32-2fe6e2472b81 [ 992.134147] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64f676b3-8241-4a5b-b8a0-c54300322df5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.150964] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-48d493a2-7c15-45e0-a430-8baa7008a173 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.164523] env[68217]: DEBUG oslo_concurrency.lockutils [req-fb317de5-424d-44dc-bf0e-99cd3349d8a1 req-34db3e5c-ed59-4cde-a76e-a8cb21657d92 service nova] Releasing lock "refresh_cache-e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 992.164923] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired lock "refresh_cache-e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 992.165117] env[68217]: DEBUG nova.network.neutron [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 992.177413] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.038s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.179964] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.291s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 992.181526] env[68217]: INFO nova.compute.claims [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 992.185626] env[68217]: INFO nova.virt.vmwareapi.images [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] The imported VM was unregistered [ 992.187946] env[68217]: DEBUG nova.virt.vmwareapi.vmops 
[None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Caching image {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 992.187946] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Creating directory with path [datastore1] devstack-image-cache_base/16498e3d-bc2f-4eea-bb32-2fe6e2472b81 {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 992.187946] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-1c68a9a3-f0d2-4ed2-8727-522fb0bf3984 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.200973] env[68217]: DEBUG nova.compute.manager [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 992.204419] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Created directory with path [datastore1] devstack-image-cache_base/16498e3d-bc2f-4eea-bb32-2fe6e2472b81 {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 992.204571] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_e69e7992-692d-4cd7-91ac-048433d24401/OSTACK_IMG_e69e7992-692d-4cd7-91ac-048433d24401.vmdk to [datastore1] devstack-image-cache_base/16498e3d-bc2f-4eea-bb32-2fe6e2472b81/16498e3d-bc2f-4eea-bb32-2fe6e2472b81.vmdk. {{(pid=68217) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 992.204928] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-bd65d9bb-8db0-4a3c-8ce5-65352d21ac5f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.209713] env[68217]: INFO nova.scheduler.client.report [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Deleted allocations for instance aa4b9cc8-d0dc-4a0b-9eec-dceace695df9 [ 992.217176] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 992.217176] env[68217]: value = "task-2961712" [ 992.217176] env[68217]: _type = "Task" [ 992.217176] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.231343] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961712, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.237304] env[68217]: DEBUG nova.virt.hardware [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 992.237304] env[68217]: DEBUG nova.virt.hardware [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 992.237304] env[68217]: DEBUG nova.virt.hardware [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 992.237609] env[68217]: DEBUG nova.virt.hardware [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 992.237609] env[68217]: DEBUG nova.virt.hardware [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 992.237691] env[68217]: DEBUG nova.virt.hardware [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 992.237994] env[68217]: DEBUG nova.virt.hardware [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 992.238183] env[68217]: DEBUG nova.virt.hardware [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 992.238360] env[68217]: DEBUG nova.virt.hardware [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 992.238589] env[68217]: DEBUG nova.virt.hardware [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 992.238837] env[68217]: DEBUG nova.virt.hardware [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 992.240838] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08e7beee-b4ee-4bd7-a154-3615a71fda22 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.248362] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-833f9ea6-0408-4322-861a-e84f5a23ed8c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.551542] env[68217]: INFO nova.compute.manager [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Took 22.73 seconds to build instance. [ 992.650554] env[68217]: DEBUG oslo_vmware.rw_handles [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526c96bc-22ed-a017-b774-13e61d8381c8/disk-0.vmdk. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 992.651577] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ff46d25-692f-468d-af05-af3df1b8e867 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.664386] env[68217]: DEBUG oslo_vmware.rw_handles [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526c96bc-22ed-a017-b774-13e61d8381c8/disk-0.vmdk is in state: ready. 
{{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 992.664626] env[68217]: ERROR oslo_vmware.rw_handles [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526c96bc-22ed-a017-b774-13e61d8381c8/disk-0.vmdk due to incomplete transfer. [ 992.664801] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-4c0ec9ce-62a3-42e3-bc6b-feb1dec01316 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.677063] env[68217]: DEBUG oslo_vmware.rw_handles [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/526c96bc-22ed-a017-b774-13e61d8381c8/disk-0.vmdk. {{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 992.677305] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Uploaded image e2ae22af-290f-4e09-86ea-4b5f1f275371 to the Glance image server {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 992.680012] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 992.680327] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-582c607f-8c50-40f2-9b10-43e3fe1d1ee0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.694276] env[68217]: DEBUG oslo_vmware.api [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 992.694276] env[68217]: value = "task-2961713" [ 992.694276] env[68217]: _type = "Task" [ 992.694276] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.706765] env[68217]: DEBUG oslo_vmware.api [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961713, 'name': Destroy_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.724758] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9a8dec65-3597-4a0e-8649-7c5fd1286b37 tempest-ServersAdminTestJSON-673344979 tempest-ServersAdminTestJSON-673344979-project-member] Lock "aa4b9cc8-d0dc-4a0b-9eec-dceace695df9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.960s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 992.730145] env[68217]: DEBUG nova.network.neutron [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 992.745467] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961712, 'name': MoveVirtualDisk_Task} progress is 18%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.043408] env[68217]: DEBUG nova.compute.manager [req-29cceff8-9bec-4003-bddf-6bddfd2e516c req-e15947a9-1bd9-4eb1-b42d-dbf287d15846 service nova] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Received event network-vif-plugged-893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 993.043742] env[68217]: DEBUG oslo_concurrency.lockutils [req-29cceff8-9bec-4003-bddf-6bddfd2e516c req-e15947a9-1bd9-4eb1-b42d-dbf287d15846 service nova] Acquiring lock "b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.043978] env[68217]: DEBUG oslo_concurrency.lockutils [req-29cceff8-9bec-4003-bddf-6bddfd2e516c req-e15947a9-1bd9-4eb1-b42d-dbf287d15846 service nova] Lock "b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.044194] env[68217]: DEBUG oslo_concurrency.lockutils [req-29cceff8-9bec-4003-bddf-6bddfd2e516c req-e15947a9-1bd9-4eb1-b42d-dbf287d15846 service nova] Lock "b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.044368] env[68217]: DEBUG nova.compute.manager [req-29cceff8-9bec-4003-bddf-6bddfd2e516c req-e15947a9-1bd9-4eb1-b42d-dbf287d15846 service nova] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] No waiting events found dispatching network-vif-plugged-893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 993.044542] env[68217]: WARNING nova.compute.manager [req-29cceff8-9bec-4003-bddf-6bddfd2e516c req-e15947a9-1bd9-4eb1-b42d-dbf287d15846 service nova] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Received unexpected event network-vif-plugged-893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0 for 
instance with vm_state building and task_state spawning. [ 993.051601] env[68217]: DEBUG nova.network.neutron [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Updating instance_info_cache with network_info: [{"id": "46e4edb9-72c4-4a7d-af91-4b553d829391", "address": "fa:16:3e:bb:64:fc", "network": {"id": "8ad06dc6-c950-487f-b4ea-fe9364f8548d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-897727038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1424003d74424a9e84d15879f2e634e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46e4edb9-72", "ovs_interfaceid": "46e4edb9-72c4-4a7d-af91-4b553d829391", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.057237] env[68217]: DEBUG oslo_concurrency.lockutils [None req-77b7f1b4-8088-4805-9682-c9821039ed2c tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "92f99a45-13a3-48d9-8dbc-4065cc8ee9dc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.241s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.123592] env[68217]: DEBUG nova.network.neutron [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Successfully updated port: 893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 993.211381] env[68217]: DEBUG oslo_vmware.api [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961713, 'name': Destroy_Task, 'duration_secs': 0.478329} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.211668] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Destroyed the VM [ 993.211911] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 993.212210] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-bd11d175-4151-4767-bc3f-3fe0128fb5a6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.226042] env[68217]: DEBUG oslo_vmware.api [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 993.226042] env[68217]: value = "task-2961714" [ 993.226042] env[68217]: _type = "Task" [ 993.226042] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.236107] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961712, 'name': MoveVirtualDisk_Task} progress is 38%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.240016] env[68217]: DEBUG oslo_vmware.api [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961714, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.487084] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b290e51-5811-40dc-8a4f-dc6101382e87 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.499369] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-057e0f4c-3f13-4304-a233-4fe87a62bde2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.543845] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c6a4fb4-2cdf-423f-ac4d-0de4a577849c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.556024] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Releasing lock "refresh_cache-e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 993.560015] env[68217]: DEBUG nova.compute.manager [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Instance network_info: |[{"id": "46e4edb9-72c4-4a7d-af91-4b553d829391", "address": "fa:16:3e:bb:64:fc", "network": {"id": "8ad06dc6-c950-487f-b4ea-fe9364f8548d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-897727038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1424003d74424a9e84d15879f2e634e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46e4edb9-72", "ovs_interfaceid": "46e4edb9-72c4-4a7d-af91-4b553d829391", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 993.560015] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:64:fc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0e00b2f1-c70f-4b21-86eb-810643cc1680', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '46e4edb9-72c4-4a7d-af91-4b553d829391', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 993.565736] env[68217]: 
DEBUG nova.virt.vmwareapi.vm_util [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Creating folder: Project (1424003d74424a9e84d15879f2e634e3). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 993.568740] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-525513b5-780f-441d-96f0-14da702874a1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.574971] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b65f9dbd-8d09-4954-9115-e6d228d85d51 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.591999] env[68217]: DEBUG nova.compute.provider_tree [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 993.599569] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Created folder: Project (1424003d74424a9e84d15879f2e634e3) in parent group-v594094. [ 993.599569] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Creating folder: Instances. Parent ref: group-v594351. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 993.599569] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-547c520b-6b8f-43f4-8c43-8f511c6c17f3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.613017] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Created folder: Instances in parent group-v594351. [ 993.613017] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 993.613017] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 993.613017] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-42bb406d-b30c-42bd-85db-4c0b66436194 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.632022] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "refresh_cache-b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.632022] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired lock "refresh_cache-b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 993.632022] env[68217]: DEBUG nova.network.neutron [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 993.640027] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 993.640027] env[68217]: value = "task-2961717" [ 993.640027] env[68217]: _type = "Task" [ 993.640027] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.651297] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961717, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.736727] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961712, 'name': MoveVirtualDisk_Task} progress is 60%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.747294] env[68217]: DEBUG oslo_vmware.api [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961714, 'name': RemoveSnapshot_Task} progress is 36%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.782054] env[68217]: DEBUG oslo_concurrency.lockutils [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "92f99a45-13a3-48d9-8dbc-4065cc8ee9dc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.782445] env[68217]: DEBUG oslo_concurrency.lockutils [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "92f99a45-13a3-48d9-8dbc-4065cc8ee9dc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.782707] env[68217]: DEBUG oslo_concurrency.lockutils [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "92f99a45-13a3-48d9-8dbc-4065cc8ee9dc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 993.782916] env[68217]: DEBUG oslo_concurrency.lockutils [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "92f99a45-13a3-48d9-8dbc-4065cc8ee9dc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 993.783121] env[68217]: DEBUG oslo_concurrency.lockutils [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "92f99a45-13a3-48d9-8dbc-4065cc8ee9dc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 993.786470] env[68217]: INFO nova.compute.manager [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Terminating instance [ 994.100903] env[68217]: DEBUG nova.scheduler.client.report [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 994.153758] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961717, 'name': CreateVM_Task, 'duration_secs': 
0.451976} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.153758] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 994.154366] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.154741] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 994.154845] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 994.155266] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e98e3164-b049-4c2f-82b8-e63293f4a23b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.165242] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 994.165242] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ff095a-95ef-403a-3b9c-4b42536057b9" [ 994.165242] env[68217]: _type = "Task" [ 994.165242] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.178983] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ff095a-95ef-403a-3b9c-4b42536057b9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.190613] env[68217]: DEBUG nova.network.neutron [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 994.243089] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961712, 'name': MoveVirtualDisk_Task} progress is 80%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.247910] env[68217]: DEBUG oslo_vmware.api [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961714, 'name': RemoveSnapshot_Task, 'duration_secs': 0.768622} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.247910] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 994.247910] env[68217]: INFO nova.compute.manager [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Took 13.85 seconds to snapshot the instance on the hypervisor. [ 994.290702] env[68217]: DEBUG nova.compute.manager [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 994.290921] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 994.291853] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ee412cb-e86e-4961-a2cf-1d66632005da {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.303764] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 994.304581] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ae3b8ee8-7a7a-405e-87c9-11db9fda48da {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.314230] env[68217]: DEBUG oslo_vmware.api [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 994.314230] env[68217]: value = "task-2961718" [ 994.314230] env[68217]: _type = "Task" [ 994.314230] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.327209] env[68217]: DEBUG oslo_vmware.api [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961718, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.389669] env[68217]: DEBUG nova.network.neutron [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Updating instance_info_cache with network_info: [{"id": "893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0", "address": "fa:16:3e:f9:bc:67", "network": {"id": "8ad06dc6-c950-487f-b4ea-fe9364f8548d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-897727038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1424003d74424a9e84d15879f2e634e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap893a1ea4-f2", "ovs_interfaceid": "893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.608430] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.428s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 994.609033] env[68217]: DEBUG nova.compute.manager [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 994.611663] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 16.217s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 994.615066] env[68217]: DEBUG nova.objects.instance [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lazy-loading 'resources' on Instance uuid 01f97d0d-df21-441c-9dc6-5c51e3798d81 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 994.676863] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ff095a-95ef-403a-3b9c-4b42536057b9, 'name': SearchDatastore_Task, 'duration_secs': 0.013397} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.677239] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 994.677501] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 994.677761] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 994.677977] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 994.678124] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 994.678416] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-0178e118-7a54-4d49-b9ca-dd9d6e36c905 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.689972] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 994.690196] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 994.690975] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eb47f5d2-9d34-4122-99f7-666aaec6baa1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.697971] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 994.697971] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522285a1-8368-5fdd-f6d7-3ebbd51ada51" [ 994.697971] env[68217]: _type = "Task" [ 994.697971] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.709815] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522285a1-8368-5fdd-f6d7-3ebbd51ada51, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.733073] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961712, 'name': MoveVirtualDisk_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.814649] env[68217]: DEBUG nova.compute.manager [None req-b0d8d111-9417-4f07-a422-4066b3f22597 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Found 1 images (rotation: 2) {{(pid=68217) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 994.829384] env[68217]: DEBUG oslo_vmware.api [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961718, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.892633] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Releasing lock "refresh_cache-b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 994.893470] env[68217]: DEBUG nova.compute.manager [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Instance network_info: |[{"id": "893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0", "address": "fa:16:3e:f9:bc:67", "network": {"id": "8ad06dc6-c950-487f-b4ea-fe9364f8548d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-897727038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1424003d74424a9e84d15879f2e634e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap893a1ea4-f2", "ovs_interfaceid": "893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 994.893647] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f9:bc:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0e00b2f1-c70f-4b21-86eb-810643cc1680', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 994.903107] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 994.903591] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 994.904026] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b35275ae-affe-4758-bc72-8bea057d7de6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.931789] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 994.931789] env[68217]: value = "task-2961719" [ 994.931789] env[68217]: _type = "Task" [ 994.931789] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.948095] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961719, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.075429] env[68217]: DEBUG nova.compute.manager [req-48e24ecc-e6ca-4e38-ac32-99dfc14d4c7b req-76e3e76d-4c66-4d8c-989e-1f4f69b29a6e service nova] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Received event network-changed-893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 995.075576] env[68217]: DEBUG nova.compute.manager [req-48e24ecc-e6ca-4e38-ac32-99dfc14d4c7b req-76e3e76d-4c66-4d8c-989e-1f4f69b29a6e service nova] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Refreshing instance network info cache due to event network-changed-893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 995.075785] env[68217]: DEBUG oslo_concurrency.lockutils [req-48e24ecc-e6ca-4e38-ac32-99dfc14d4c7b req-76e3e76d-4c66-4d8c-989e-1f4f69b29a6e service nova] Acquiring lock "refresh_cache-b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.075923] env[68217]: DEBUG oslo_concurrency.lockutils [req-48e24ecc-e6ca-4e38-ac32-99dfc14d4c7b req-76e3e76d-4c66-4d8c-989e-1f4f69b29a6e service nova] Acquired lock "refresh_cache-b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 995.076236] env[68217]: DEBUG nova.network.neutron [req-48e24ecc-e6ca-4e38-ac32-99dfc14d4c7b req-76e3e76d-4c66-4d8c-989e-1f4f69b29a6e service nova] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Refreshing network info cache for port 893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 995.114934] env[68217]: DEBUG nova.compute.utils [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 995.116377] env[68217]: DEBUG nova.compute.manager [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 995.116626] env[68217]: DEBUG nova.network.neutron [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 995.158837] env[68217]: DEBUG nova.policy [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fd08981ea724019826d597a1c8b4ecd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d6233e9874c41329f81c990f8bc72b1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 995.213815] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522285a1-8368-5fdd-f6d7-3ebbd51ada51, 'name': SearchDatastore_Task, 'duration_secs': 0.011611} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.214718] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e7ccad6c-6e7a-4871-9e62-69fa5067fa53 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.225962] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 995.225962] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52bfb5c9-0b90-48c1-22a1-7bbcfb32be50" [ 995.225962] env[68217]: _type = "Task" [ 995.225962] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.240218] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961712, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.531486} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.244096] env[68217]: INFO nova.virt.vmwareapi.ds_util [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_e69e7992-692d-4cd7-91ac-048433d24401/OSTACK_IMG_e69e7992-692d-4cd7-91ac-048433d24401.vmdk to [datastore1] devstack-image-cache_base/16498e3d-bc2f-4eea-bb32-2fe6e2472b81/16498e3d-bc2f-4eea-bb32-2fe6e2472b81.vmdk. 
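The MoveVirtualDisk_Task entries above (progress 0% → 18% → 38% → 60% → 80% → 100%, then completed in 2.531486s) are produced by oslo.vmware's task-polling loop: the driver invokes a vSphere task method over SOAP ("Invoking VirtualMachine.Destroy_Task ..." and similar lines), gets back a Task managed-object reference, and then blocks in wait_for_task(), which re-reads the task state on a fixed interval and logs each poll. A minimal sketch of that pattern follows; the host, credentials, and the destroy_vm() helper are illustrative placeholders, not values or code taken from this deployment.

```python
# Sketch of the oslo.vmware invoke/poll pattern visible in this trace
# (assumption: a recent oslo.vmware release; all names below are placeholders).
from oslo_vmware import api as vmware_api


def destroy_vm(session, vm_ref):
    """Issue Destroy_Task for a VM and block until it finishes.

    invoke_api() sends the SOAP call (the "Invoking VirtualMachine.Destroy_Task"
    entries in the log) and returns a Task managed-object reference;
    wait_for_task() then polls it every task_poll_interval seconds, emitting
    the "Task: {...} progress is N%." lines, and raises if the task ends in
    an error state instead of success.
    """
    task = session.invoke_api(session.vim, 'Destroy_Task', vm_ref)
    return session.wait_for_task(task)


if __name__ == '__main__':
    # Placeholder connection details; a real nova-compute reads these from
    # the [vmware] section of nova.conf rather than hard-coding them.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.org', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)
    # destroy_vm(session, vm_ref) would then be called with a VirtualMachine
    # managed-object reference obtained from a prior PropertyCollector lookup.
```

The same invoke-then-poll pattern accounts for the CreateVM_Task, RemoveSnapshot_Task, PowerOffVM_Task, and DeleteDatastoreFile_Task progress entries elsewhere in this section.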
[ 995.244296] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Cleaning up location [datastore1] OSTACK_IMG_e69e7992-692d-4cd7-91ac-048433d24401 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 995.244477] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_e69e7992-692d-4cd7-91ac-048433d24401 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 995.244730] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52bfb5c9-0b90-48c1-22a1-7bbcfb32be50, 'name': SearchDatastore_Task, 'duration_secs': 0.011858} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.247319] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e5c1d5c-8e20-4bac-bad8-697b56f76ca0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.249291] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 995.249498] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] e8ed78ff-94dd-42d3-8a4d-8e58dc788e55/e8ed78ff-94dd-42d3-8a4d-8e58dc788e55.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 995.249992] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-61488aeb-68de-4893-9d28-97b09ebfc754 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.260448] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 995.260448] env[68217]: value = "task-2961721" [ 995.260448] env[68217]: _type = "Task" [ 995.260448] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.260886] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 995.260886] env[68217]: value = "task-2961720" [ 995.260886] env[68217]: _type = "Task" [ 995.260886] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.279289] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961721, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.285263] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961720, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.329099] env[68217]: DEBUG oslo_vmware.api [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961718, 'name': PowerOffVM_Task, 'duration_secs': 0.981965} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.329363] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 995.329532] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 995.329809] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1c912122-89cc-4719-901b-d7ad70b0dc85 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.399924] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 995.400181] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 995.400367] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleting the datastore file [datastore1] 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 995.400645] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b426792c-6326-41c2-bd2a-1350d09d1fe2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.423053] env[68217]: DEBUG oslo_vmware.api [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for the task: (returnval){ [ 995.423053] env[68217]: value = "task-2961723" [ 995.423053] env[68217]: _type = "Task" [ 995.423053] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.434864] env[68217]: DEBUG oslo_vmware.api [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961723, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.445190] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961719, 'name': CreateVM_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.461497] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f4ccdd3-451e-47bf-95e3-dc28f729d32a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.470207] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7ca5ac6-adef-4e35-a2d0-e88974deb794 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.512087] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f7d9ac4-80e1-44ec-ae6a-83dc19eca946 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.523372] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6add7ae0-8cca-4f9c-b5ed-bee9a79bdeac {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.542460] env[68217]: DEBUG nova.compute.provider_tree [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 995.619298] env[68217]: DEBUG nova.compute.manager [None req-a771a937-b824-4a91-a2a1-fcba74b475cb 
tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 995.685105] env[68217]: DEBUG nova.compute.manager [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 995.686064] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f50013-306b-40b4-8c5e-3de36c4babc4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.777765] env[68217]: DEBUG nova.network.neutron [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Successfully created port: 0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 995.786686] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961721, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.045712} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.789334] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 995.789526] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Releasing lock "[datastore1] devstack-image-cache_base/16498e3d-bc2f-4eea-bb32-2fe6e2472b81/16498e3d-bc2f-4eea-bb32-2fe6e2472b81.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 995.789815] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/16498e3d-bc2f-4eea-bb32-2fe6e2472b81/16498e3d-bc2f-4eea-bb32-2fe6e2472b81.vmdk to [datastore1] d14026b1-84dd-430e-be94-94dcb1f47473/d14026b1-84dd-430e-be94-94dcb1f47473.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 995.790116] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961720, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.790342] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9a4f6366-8d3e-4657-b23c-1bd88bd709d1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.800586] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 995.800586] env[68217]: value = "task-2961724" [ 995.800586] env[68217]: _type = "Task" [ 995.800586] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.813837] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961724, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.936532] env[68217]: DEBUG oslo_vmware.api [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Task: {'id': task-2961723, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.189706} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.940019] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 995.940239] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 995.940423] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 995.940593] env[68217]: INFO nova.compute.manager [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Took 1.65 seconds to destroy the instance on the hypervisor. [ 995.940854] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 995.941262] env[68217]: DEBUG nova.compute.manager [-] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 995.941262] env[68217]: DEBUG nova.network.neutron [-] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 995.950865] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961719, 'name': CreateVM_Task, 'duration_secs': 0.723396} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.950939] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 995.951644] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.951793] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 995.952125] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 995.952399] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63a0441c-d403-4dab-9421-93d10d2c74ab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.960017] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 995.960017] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52dd022f-8cdc-70de-d41d-12b55ca63f48" [ 995.960017] env[68217]: _type = "Task" [ 995.960017] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.968109] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52dd022f-8cdc-70de-d41d-12b55ca63f48, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.073974] env[68217]: ERROR nova.scheduler.client.report [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [req-1d819f87-7e7a-4e51-a285-da0d2f142156] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-1d819f87-7e7a-4e51-a285-da0d2f142156"}]} [ 996.098105] env[68217]: DEBUG nova.scheduler.client.report [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 996.104518] env[68217]: DEBUG nova.network.neutron [req-48e24ecc-e6ca-4e38-ac32-99dfc14d4c7b req-76e3e76d-4c66-4d8c-989e-1f4f69b29a6e service nova] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Updated VIF entry in instance network info cache for port 893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 996.104987] env[68217]: DEBUG nova.network.neutron [req-48e24ecc-e6ca-4e38-ac32-99dfc14d4c7b req-76e3e76d-4c66-4d8c-989e-1f4f69b29a6e service nova] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Updating instance_info_cache with network_info: [{"id": "893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0", "address": "fa:16:3e:f9:bc:67", "network": {"id": "8ad06dc6-c950-487f-b4ea-fe9364f8548d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-897727038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1424003d74424a9e84d15879f2e634e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap893a1ea4-f2", "ovs_interfaceid": "893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.121220] env[68217]: DEBUG nova.scheduler.client.report [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 
tempest-DeleteServersTestJSON-2028014361-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 996.121482] env[68217]: DEBUG nova.compute.provider_tree [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 996.149398] env[68217]: DEBUG nova.scheduler.client.report [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 996.177309] env[68217]: DEBUG nova.scheduler.client.report [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 996.203834] env[68217]: INFO nova.compute.manager [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] instance snapshotting [ 996.204635] env[68217]: DEBUG nova.objects.instance [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lazy-loading 'flavor' on Instance uuid 04149a5c-d1b5-4d71-a1ca-44696506a40d {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 996.280687] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961720, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.950642} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.280950] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] e8ed78ff-94dd-42d3-8a4d-8e58dc788e55/e8ed78ff-94dd-42d3-8a4d-8e58dc788e55.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 996.281179] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 996.281521] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6d424fbb-b20f-4ec6-b8bb-b929415c577f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.293811] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 996.293811] env[68217]: value = "task-2961725" [ 996.293811] env[68217]: _type = "Task" [ 996.293811] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.303402] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961725, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.318884] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961724, 'name': CopyVirtualDisk_Task} progress is 9%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.475185] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52dd022f-8cdc-70de-d41d-12b55ca63f48, 'name': SearchDatastore_Task, 'duration_secs': 0.070931} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.475661] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 996.475934] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 996.476195] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 996.476887] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 996.476887] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 996.477036] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82c0800d-c913-428b-9afc-7d97375a3ab8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.480759] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25023912-b7f8-473d-97cf-8cb50e5ee329 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.494062] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd18b845-c404-42c3-a805-f72dbdf21b11 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.499590] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 996.499796] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 
tempest-ServerRescueNegativeTestJSON-187294080-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 996.501080] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bd401ab6-65cb-4a89-83e2-1cf94fbc33ae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.510183] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 996.510183] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]526347ad-f22d-d31e-404e-a8e24adc4204" [ 996.510183] env[68217]: _type = "Task" [ 996.510183] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.545901] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66b03c8-16d7-4e2a-9f55-931547b717e6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.560568] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-121df0bc-dc14-4690-9fbf-fa2fbfc9224f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.565276] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526347ad-f22d-d31e-404e-a8e24adc4204, 'name': SearchDatastore_Task, 'duration_secs': 0.016121} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.566950] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-13f7dd83-0f7d-4e25-81ce-8c9509030aa1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.572473] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 996.572473] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52de4b71-d81a-dc7f-c678-96548fba1ffc" [ 996.572473] env[68217]: _type = "Task" [ 996.572473] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.581193] env[68217]: DEBUG nova.compute.provider_tree [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 996.596840] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52de4b71-d81a-dc7f-c678-96548fba1ffc, 'name': SearchDatastore_Task, 'duration_secs': 0.015442} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.597731] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 996.597731] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1/b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 996.597945] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7c20f59c-f49a-4de6-a2a4-ac5e8b26fd51 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.609932] env[68217]: DEBUG oslo_concurrency.lockutils [req-48e24ecc-e6ca-4e38-ac32-99dfc14d4c7b req-76e3e76d-4c66-4d8c-989e-1f4f69b29a6e service nova] Releasing lock "refresh_cache-b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 996.614318] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 996.614318] env[68217]: value = "task-2961726" [ 996.614318] env[68217]: _type = "Task" [ 996.614318] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.628021] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961726, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.634951] env[68217]: DEBUG nova.compute.manager [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 996.663413] env[68217]: DEBUG nova.virt.hardware [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 996.663711] env[68217]: DEBUG nova.virt.hardware [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 996.663812] env[68217]: DEBUG nova.virt.hardware [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 996.663932] env[68217]: DEBUG nova.virt.hardware [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 996.664103] env[68217]: DEBUG nova.virt.hardware [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 996.664266] env[68217]: DEBUG nova.virt.hardware [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 996.664500] env[68217]: DEBUG nova.virt.hardware [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 996.664622] env[68217]: DEBUG nova.virt.hardware [None req-a771a937-b824-4a91-a2a1-fcba74b475cb 
tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 996.664786] env[68217]: DEBUG nova.virt.hardware [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 996.664946] env[68217]: DEBUG nova.virt.hardware [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 996.665133] env[68217]: DEBUG nova.virt.hardware [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 996.666430] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0317828-0cdb-46c2-94d4-42e05422cb16 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.680345] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2156262-ca32-4ae7-b163-c90dc54f3dc2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.713609] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39938107-1226-4ea7-8ec1-9a3a5bdda4cc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.740238] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0c54c84-a71a-4501-81ac-b690ead76261 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.809682] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961725, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.13711} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.810406] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 996.811241] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50066205-9849-43f5-81e8-68127c392030 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.817154] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961724, 'name': CopyVirtualDisk_Task} progress is 29%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.839120] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] e8ed78ff-94dd-42d3-8a4d-8e58dc788e55/e8ed78ff-94dd-42d3-8a4d-8e58dc788e55.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 996.839120] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-130b7e9a-a88f-4c7f-9036-af3729871508 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.864952] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 996.864952] env[68217]: value = "task-2961727" [ 996.864952] env[68217]: _type = "Task" [ 996.864952] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.878598] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961727, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.916284] env[68217]: DEBUG nova.network.neutron [-] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 997.088555] env[68217]: DEBUG nova.scheduler.client.report [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 997.104782] env[68217]: DEBUG nova.compute.manager [req-262bec64-44c4-4393-9e2f-15a498b92e87 req-553ac597-05f2-42f7-98c3-83488ec265fc service nova] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Received event network-vif-deleted-2b488304-74ad-4bc6-9590-24a5d0f5d001 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 997.127538] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961726, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.257574] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 997.257956] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-6e9f43f1-bbed-4dbb-af14-e534bd7b82bd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.270775] env[68217]: DEBUG oslo_vmware.api [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 997.270775] env[68217]: value = "task-2961728" [ 997.270775] env[68217]: _type = "Task" [ 997.270775] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.285011] env[68217]: DEBUG oslo_vmware.api [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961728, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.316886] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961724, 'name': CopyVirtualDisk_Task} progress is 52%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.380586] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961727, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.419683] env[68217]: INFO nova.compute.manager [-] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Took 1.48 seconds to deallocate network for instance. [ 997.595375] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.983s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 997.598568] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.531s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 997.602868] env[68217]: INFO nova.compute.claims [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 997.632451] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961726, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.594921} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.632839] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1/b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 997.633124] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 997.633458] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ef0b656-7148-4879-80ef-3e4cc23d7796 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.639272] env[68217]: INFO nova.scheduler.client.report [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleted allocations for instance 01f97d0d-df21-441c-9dc6-5c51e3798d81 [ 997.647457] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 997.647457] env[68217]: value = "task-2961729" [ 997.647457] env[68217]: _type = "Task" [ 997.647457] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.665607] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961729, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.789898] env[68217]: DEBUG oslo_vmware.api [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961728, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.809345] env[68217]: DEBUG nova.network.neutron [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Successfully updated port: 0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 997.824331] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961724, 'name': CopyVirtualDisk_Task} progress is 71%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.881570] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961727, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.927808] env[68217]: DEBUG oslo_concurrency.lockutils [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 998.152812] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2be45061-7ea7-4352-8e66-37d063426ac9 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "01f97d0d-df21-441c-9dc6-5c51e3798d81" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 22.704s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 998.164749] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961729, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.368183} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.165209] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 998.166109] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72799ce5-c0bf-40da-aed2-470e36efc4a2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.194383] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1/b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 998.194383] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a4246e6c-0011-4c18-b8b2-2b141161c84f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.221873] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 998.221873] env[68217]: value = "task-2961730" [ 998.221873] env[68217]: _type = "Task" [ 
998.221873] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.235665] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961730, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.287606] env[68217]: DEBUG oslo_vmware.api [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961728, 'name': CreateSnapshot_Task, 'duration_secs': 0.80091} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.287842] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 998.288613] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3ee7850-acc5-4956-8373-0b6ece5641f2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.312079] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "refresh_cache-66ca9efd-1839-4e98-b006-5fc3adda375d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 998.312279] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "refresh_cache-66ca9efd-1839-4e98-b006-5fc3adda375d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 998.312499] env[68217]: DEBUG nova.network.neutron [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 998.326486] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961724, 'name': CopyVirtualDisk_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.383105] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961727, 'name': ReconfigVM_Task, 'duration_secs': 1.499732} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.383442] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Reconfigured VM instance instance-0000005c to attach disk [datastore2] e8ed78ff-94dd-42d3-8a4d-8e58dc788e55/e8ed78ff-94dd-42d3-8a4d-8e58dc788e55.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 998.384851] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a6829f8a-9917-4c1d-9bba-6fbe288ab998 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.398730] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 998.398730] env[68217]: value = "task-2961731" [ 998.398730] env[68217]: _type = "Task" [ 998.398730] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.429609] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961731, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.734343] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961730, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.815739] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 998.817991] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-004bb7e6-2fc3-44f4-afa4-df24582fd798 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.825647] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9cc2c4f4-a201-4d04-b576-f852733c4061 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.831139] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961724, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.680378} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.832798] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/16498e3d-bc2f-4eea-bb32-2fe6e2472b81/16498e3d-bc2f-4eea-bb32-2fe6e2472b81.vmdk to [datastore1] d14026b1-84dd-430e-be94-94dcb1f47473/d14026b1-84dd-430e-be94-94dcb1f47473.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 998.833216] env[68217]: DEBUG oslo_vmware.api [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 998.833216] env[68217]: value = "task-2961732" [ 998.833216] env[68217]: _type = "Task" [ 998.833216] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.833886] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d749c37-7cbc-41c5-93f6-e2e6d0235a34 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.842444] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbf1e182-51c3-45c5-8d5d-5fd3d290bb41 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.864148] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Reconfiguring VM instance instance-0000002d to attach disk [datastore1] d14026b1-84dd-430e-be94-94dcb1f47473/d14026b1-84dd-430e-be94-94dcb1f47473.vmdk or device None with type streamOptimized {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 998.867964] env[68217]: DEBUG nova.network.neutron [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 998.870560] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8390b935-79c6-4d31-819c-646d9b89f814 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.887148] env[68217]: DEBUG oslo_vmware.api [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961732, 'name': CloneVM_Task} progress is 10%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.915504] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89505c40-9c39-4bc0-ba21-c98f30ed08a5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.919148] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 998.919148] env[68217]: value = "task-2961733" [ 998.919148] env[68217]: _type = "Task" [ 998.919148] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.929264] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961731, 'name': Rename_Task, 'duration_secs': 0.251584} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.930051] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 998.931241] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be9922c7-17c5-4cc1-b616-2ceec16a5dcb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.940707] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-eab8b25d-4657-4bd0-8a33-ea274d8299a4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.942334] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961733, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.949844] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 998.949844] env[68217]: value = "task-2961734" [ 998.949844] env[68217]: _type = "Task" [ 998.949844] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.960345] env[68217]: DEBUG nova.compute.provider_tree [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 998.975554] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961734, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.136517] env[68217]: DEBUG nova.compute.manager [req-426bc60e-4d8c-4521-a111-fdb1b5cdad9b req-f0f31646-5899-439e-b7a3-09e556a97875 service nova] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Received event network-vif-plugged-0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 999.136781] env[68217]: DEBUG oslo_concurrency.lockutils [req-426bc60e-4d8c-4521-a111-fdb1b5cdad9b req-f0f31646-5899-439e-b7a3-09e556a97875 service nova] Acquiring lock "66ca9efd-1839-4e98-b006-5fc3adda375d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.137027] env[68217]: DEBUG oslo_concurrency.lockutils [req-426bc60e-4d8c-4521-a111-fdb1b5cdad9b req-f0f31646-5899-439e-b7a3-09e556a97875 service nova] Lock "66ca9efd-1839-4e98-b006-5fc3adda375d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.137206] env[68217]: DEBUG oslo_concurrency.lockutils [req-426bc60e-4d8c-4521-a111-fdb1b5cdad9b req-f0f31646-5899-439e-b7a3-09e556a97875 service nova] Lock "66ca9efd-1839-4e98-b006-5fc3adda375d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.137398] env[68217]: DEBUG nova.compute.manager [req-426bc60e-4d8c-4521-a111-fdb1b5cdad9b req-f0f31646-5899-439e-b7a3-09e556a97875 service nova] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] No waiting events found dispatching network-vif-plugged-0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 999.137578] env[68217]: WARNING nova.compute.manager [req-426bc60e-4d8c-4521-a111-fdb1b5cdad9b req-f0f31646-5899-439e-b7a3-09e556a97875 service nova] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Received unexpected event network-vif-plugged-0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3 for instance with vm_state building and task_state spawning. 
[ 999.137737] env[68217]: DEBUG nova.compute.manager [req-426bc60e-4d8c-4521-a111-fdb1b5cdad9b req-f0f31646-5899-439e-b7a3-09e556a97875 service nova] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Received event network-changed-0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 999.137885] env[68217]: DEBUG nova.compute.manager [req-426bc60e-4d8c-4521-a111-fdb1b5cdad9b req-f0f31646-5899-439e-b7a3-09e556a97875 service nova] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Refreshing instance network info cache due to event network-changed-0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 999.138064] env[68217]: DEBUG oslo_concurrency.lockutils [req-426bc60e-4d8c-4521-a111-fdb1b5cdad9b req-f0f31646-5899-439e-b7a3-09e556a97875 service nova] Acquiring lock "refresh_cache-66ca9efd-1839-4e98-b006-5fc3adda375d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.151087] env[68217]: DEBUG nova.network.neutron [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Updating instance_info_cache with network_info: [{"id": "0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3", "address": "fa:16:3e:e6:7c:53", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ed5e62a-cf", "ovs_interfaceid": "0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.181725] env[68217]: DEBUG oslo_concurrency.lockutils [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "ffff4cf4-f663-4965-84d1-8351bfde1252" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 999.181725] env[68217]: DEBUG oslo_concurrency.lockutils [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "ffff4cf4-f663-4965-84d1-8351bfde1252" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.233376] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961730, 'name': ReconfigVM_Task, 'duration_secs': 0.571764} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.233743] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Reconfigured VM instance instance-0000005d to attach disk [datastore2] b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1/b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 999.234460] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-9a55481f-d94f-4111-a915-3a21488ed8d2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.242381] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 999.242381] env[68217]: value = "task-2961735" [ 999.242381] env[68217]: _type = "Task" [ 999.242381] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.251460] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961735, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.346599] env[68217]: DEBUG oslo_vmware.api [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961732, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.429573] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961733, 'name': ReconfigVM_Task, 'duration_secs': 0.400033} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.429848] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Reconfigured VM instance instance-0000002d to attach disk [datastore1] d14026b1-84dd-430e-be94-94dcb1f47473/d14026b1-84dd-430e-be94-94dcb1f47473.vmdk or device None with type streamOptimized {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 999.431285] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_format': None, 'device_type': 'disk', 'size': 0, 'encryption_options': None, 'encryption_secret_uuid': None, 'guest_format': None, 'encrypted': False, 'boot_index': 0, 'disk_bus': None, 'device_name': '/dev/sda', 'image_id': '575ba628-84b6-4b0c-98ba-305166627d10'}], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sdb', 'attachment_id': '2375c380-53a2-44f9-a223-cec76f9ea6c8', 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594343', 'volume_id': '3e056b4f-fea1-46b1-b841-37e7f391cb46', 'name': 'volume-3e056b4f-fea1-46b1-b841-37e7f391cb46', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'd14026b1-84dd-430e-be94-94dcb1f47473', 'attached_at': '', 'detached_at': '', 'volume_id': '3e056b4f-fea1-46b1-b841-37e7f391cb46', 'serial': '3e056b4f-fea1-46b1-b841-37e7f391cb46'}, 'guest_format': None, 'delete_on_termination': False, 'boot_index': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=68217) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 999.431493] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Volume attach. 
Driver type: vmdk {{(pid=68217) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 999.431684] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594343', 'volume_id': '3e056b4f-fea1-46b1-b841-37e7f391cb46', 'name': 'volume-3e056b4f-fea1-46b1-b841-37e7f391cb46', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'd14026b1-84dd-430e-be94-94dcb1f47473', 'attached_at': '', 'detached_at': '', 'volume_id': '3e056b4f-fea1-46b1-b841-37e7f391cb46', 'serial': '3e056b4f-fea1-46b1-b841-37e7f391cb46'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 999.432533] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5662fb18-979d-43f1-9fa6-bfbb2e53a240 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.449123] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3273d71-a6cc-4a7b-9e1c-ee3f8a94360a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.469418] env[68217]: DEBUG nova.scheduler.client.report [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 999.479786] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Reconfiguring VM instance instance-0000002d to attach disk [datastore2] volume-3e056b4f-fea1-46b1-b841-37e7f391cb46/volume-3e056b4f-fea1-46b1-b841-37e7f391cb46.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 999.483388] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21f1ea41-5f91-4ddc-8e74-b1d4dfd1bcd4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.503975] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961734, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.505551] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 999.505551] env[68217]: value = "task-2961736" [ 999.505551] env[68217]: _type = "Task" [ 999.505551] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.515728] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961736, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.653396] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "refresh_cache-66ca9efd-1839-4e98-b006-5fc3adda375d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 999.653752] env[68217]: DEBUG nova.compute.manager [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Instance network_info: |[{"id": "0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3", "address": "fa:16:3e:e6:7c:53", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ed5e62a-cf", "ovs_interfaceid": "0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 999.654094] env[68217]: DEBUG oslo_concurrency.lockutils [req-426bc60e-4d8c-4521-a111-fdb1b5cdad9b req-f0f31646-5899-439e-b7a3-09e556a97875 service nova] Acquired lock "refresh_cache-66ca9efd-1839-4e98-b006-5fc3adda375d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 999.654363] env[68217]: DEBUG nova.network.neutron [req-426bc60e-4d8c-4521-a111-fdb1b5cdad9b req-f0f31646-5899-439e-b7a3-09e556a97875 service nova] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Refreshing network info cache for port 0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 999.655642] env[68217]: DEBUG 
nova.virt.vmwareapi.vmops [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e6:7c:53', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 999.664224] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 999.667464] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 999.668017] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ac3a0d86-9ba0-4923-8d07-0a30ae0c1708 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.683735] env[68217]: DEBUG nova.compute.manager [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 999.693037] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 999.693037] env[68217]: value = "task-2961737" [ 999.693037] env[68217]: _type = "Task" [ 999.693037] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.702833] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961737, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.753055] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961735, 'name': Rename_Task, 'duration_secs': 0.153203} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.753479] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 999.753534] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1b34509d-27cb-4ef1-86b1-a8132f757865 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.761837] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 999.761837] env[68217]: value = "task-2961738" [ 999.761837] env[68217]: _type = "Task" [ 999.761837] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.777092] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961738, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.853413] env[68217]: DEBUG oslo_vmware.api [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961732, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.979837] env[68217]: DEBUG oslo_vmware.api [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961734, 'name': PowerOnVM_Task, 'duration_secs': 0.707312} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.980137] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 999.980346] env[68217]: INFO nova.compute.manager [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Took 10.24 seconds to spawn the instance on the hypervisor. 
[ 999.980529] env[68217]: DEBUG nova.compute.manager [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 999.981299] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.383s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 999.981809] env[68217]: DEBUG nova.compute.manager [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 999.984802] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db388873-ce5c-47d5-9238-2991d27529fe {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.988774] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.247s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 999.990037] env[68217]: INFO nova.compute.claims [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1000.014812] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961736, 'name': ReconfigVM_Task, 'duration_secs': 0.441208} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.015054] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Reconfigured VM instance instance-0000002d to attach disk [datastore2] volume-3e056b4f-fea1-46b1-b841-37e7f391cb46/volume-3e056b4f-fea1-46b1-b841-37e7f391cb46.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1000.020647] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4ea9f3d7-ee51-465a-b227-cdd0ea831a93 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.038824] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1000.038824] env[68217]: value = "task-2961739" [ 1000.038824] env[68217]: _type = "Task" [ 1000.038824] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.051801] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961739, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.209296] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961737, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.215081] env[68217]: DEBUG oslo_concurrency.lockutils [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1000.224310] env[68217]: DEBUG nova.network.neutron [req-426bc60e-4d8c-4521-a111-fdb1b5cdad9b req-f0f31646-5899-439e-b7a3-09e556a97875 service nova] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Updated VIF entry in instance network info cache for port 0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1000.224781] env[68217]: DEBUG nova.network.neutron [req-426bc60e-4d8c-4521-a111-fdb1b5cdad9b req-f0f31646-5899-439e-b7a3-09e556a97875 service nova] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Updating instance_info_cache with network_info: [{"id": "0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3", "address": "fa:16:3e:e6:7c:53", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ed5e62a-cf", "ovs_interfaceid": "0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.279553] env[68217]: DEBUG oslo_vmware.api [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961738, 'name': PowerOnVM_Task, 'duration_secs': 0.484277} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.279829] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1000.280050] env[68217]: INFO nova.compute.manager [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Took 8.08 seconds to spawn the instance on the hypervisor. 
[ 1000.280258] env[68217]: DEBUG nova.compute.manager [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1000.281229] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a026180c-9489-4e1e-8c27-7fc309c343d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.353085] env[68217]: DEBUG oslo_vmware.api [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961732, 'name': CloneVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.489873] env[68217]: DEBUG nova.compute.utils [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1000.491240] env[68217]: DEBUG nova.compute.manager [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1000.491419] env[68217]: DEBUG nova.network.neutron [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1000.506674] env[68217]: INFO nova.compute.manager [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Took 27.47 seconds to build instance. [ 1000.530592] env[68217]: DEBUG nova.policy [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '65e5f874d4fb4e9c9ee477b7dfb7748e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e1a02f6f1b5d4a13b59e08dd51a81137', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1000.548181] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961739, 'name': ReconfigVM_Task, 'duration_secs': 0.200629} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.548485] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594343', 'volume_id': '3e056b4f-fea1-46b1-b841-37e7f391cb46', 'name': 'volume-3e056b4f-fea1-46b1-b841-37e7f391cb46', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'd14026b1-84dd-430e-be94-94dcb1f47473', 'attached_at': '', 'detached_at': '', 'volume_id': '3e056b4f-fea1-46b1-b841-37e7f391cb46', 'serial': '3e056b4f-fea1-46b1-b841-37e7f391cb46'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1000.549075] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4f9dae04-5953-4b5f-8428-1b685094b0f2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.555928] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1000.555928] env[68217]: value = "task-2961740" [ 1000.555928] env[68217]: _type = "Task" [ 1000.555928] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.564176] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961740, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.707752] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961737, 'name': CreateVM_Task, 'duration_secs': 0.529689} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.707752] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1000.707752] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.707752] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1000.707752] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1000.707752] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc7fc90f-43cf-4c63-bed2-5575fdbefa5c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.713019] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1000.713019] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e11a63-5e05-28aa-5d9d-eadd1ad5ccf4" [ 1000.713019] env[68217]: _type = "Task" [ 1000.713019] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.721042] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e11a63-5e05-28aa-5d9d-eadd1ad5ccf4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.727761] env[68217]: DEBUG oslo_concurrency.lockutils [req-426bc60e-4d8c-4521-a111-fdb1b5cdad9b req-f0f31646-5899-439e-b7a3-09e556a97875 service nova] Releasing lock "refresh_cache-66ca9efd-1839-4e98-b006-5fc3adda375d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1000.801562] env[68217]: INFO nova.compute.manager [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Took 27.33 seconds to build instance. 
[ 1000.856032] env[68217]: DEBUG oslo_vmware.api [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961732, 'name': CloneVM_Task, 'duration_secs': 1.841556} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.856032] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Created linked-clone VM from snapshot [ 1000.856032] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62435539-8f48-42f1-b487-f9e33f6a9beb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.861446] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Uploading image 9ee6281c-21fa-40a7-ae36-cd54a57dfaea {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1000.866189] env[68217]: DEBUG nova.network.neutron [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Successfully created port: e30969ad-58d0-4513-8a14-98c33a9ff504 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1000.885826] env[68217]: DEBUG oslo_vmware.rw_handles [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1000.885826] env[68217]: value = "vm-594356" [ 1000.885826] env[68217]: _type = "VirtualMachine" [ 1000.885826] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1000.886464] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-f26e51a1-f341-4ad5-ab36-585e4b0c8457 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.893898] env[68217]: DEBUG oslo_vmware.rw_handles [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lease: (returnval){ [ 1000.893898] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ef2048-3a04-e0ed-07e9-cbc2ad5fca91" [ 1000.893898] env[68217]: _type = "HttpNfcLease" [ 1000.893898] env[68217]: } obtained for exporting VM: (result){ [ 1000.893898] env[68217]: value = "vm-594356" [ 1000.893898] env[68217]: _type = "VirtualMachine" [ 1000.893898] env[68217]: }. 
{{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1000.894314] env[68217]: DEBUG oslo_vmware.api [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the lease: (returnval){ [ 1000.894314] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ef2048-3a04-e0ed-07e9-cbc2ad5fca91" [ 1000.894314] env[68217]: _type = "HttpNfcLease" [ 1000.894314] env[68217]: } to be ready. {{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1000.901061] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1000.901061] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ef2048-3a04-e0ed-07e9-cbc2ad5fca91" [ 1000.901061] env[68217]: _type = "HttpNfcLease" [ 1000.901061] env[68217]: } is initializing. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1000.994675] env[68217]: DEBUG nova.compute.manager [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1001.008150] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6a0a94a7-95f6-4f56-80e7-17e27a243c21 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.982s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.067430] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961740, 'name': Rename_Task, 'duration_secs': 0.250351} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.071478] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1001.072595] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1251d468-3385-4f57-b474-cdc231919923 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.080334] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1001.080334] env[68217]: value = "task-2961742" [ 1001.080334] env[68217]: _type = "Task" [ 1001.080334] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.095666] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961742, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.222704] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e11a63-5e05-28aa-5d9d-eadd1ad5ccf4, 'name': SearchDatastore_Task, 'duration_secs': 0.011315} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.223031] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1001.223617] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1001.223617] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.223706] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1001.223810] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1001.224085] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2fe008cb-db8b-42c2-9a6b-554b0869d410 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.239301] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1001.239492] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None 
req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1001.241065] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1483e492-8a77-4d9a-bd3b-19e82fedf1f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.244211] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b24df210-924a-4431-8f02-9b9c40e6287b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.250101] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1001.250101] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e0d0ad-c097-304a-bec6-735d184d295a" [ 1001.250101] env[68217]: _type = "Task" [ 1001.250101] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.255478] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23af791-b051-4642-9a8d-aaa7dcb9d38f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.265036] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e0d0ad-c097-304a-bec6-735d184d295a, 'name': SearchDatastore_Task, 'duration_secs': 0.009941} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.289009] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44ba2826-f9e0-4630-8fb8-11302d70e768 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.291779] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0e8fe1-79d8-450b-b7ae-e678da4c9427 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.299581] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1001.299581] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d4b2c3-03f3-bcd3-9782-6063600dad22" [ 1001.299581] env[68217]: _type = "Task" [ 1001.299581] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.300871] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a1dc05d-b24f-437c-9cc4-1c38c8be80a3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.305106] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8f4d2-929b-41df-9f36-77b28ef9e5d4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.839s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1001.313386] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d4b2c3-03f3-bcd3-9782-6063600dad22, 'name': SearchDatastore_Task, 'duration_secs': 0.010465} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.320743] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1001.321035] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 66ca9efd-1839-4e98-b006-5fc3adda375d/66ca9efd-1839-4e98-b006-5fc3adda375d.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1001.321504] env[68217]: DEBUG nova.compute.provider_tree [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.322775] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a9ca6a0c-f93c-4897-add7-f9c1c1032147 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.330388] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1001.330388] env[68217]: value = "task-2961743" [ 1001.330388] env[68217]: _type = "Task" [ 1001.330388] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.341215] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961743, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.403300] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1001.403300] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ef2048-3a04-e0ed-07e9-cbc2ad5fca91" [ 1001.403300] env[68217]: _type = "HttpNfcLease" [ 1001.403300] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1001.403594] env[68217]: DEBUG oslo_vmware.rw_handles [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1001.403594] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ef2048-3a04-e0ed-07e9-cbc2ad5fca91" [ 1001.403594] env[68217]: _type = "HttpNfcLease" [ 1001.403594] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1001.404423] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446d264d-b162-43bf-a379-c389ab0f39df {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.424746] env[68217]: DEBUG oslo_vmware.rw_handles [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c9808e-9dfe-69b6-e164-ebf977e60187/disk-0.vmdk from lease info. {{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1001.425020] env[68217]: DEBUG oslo_vmware.rw_handles [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c9808e-9dfe-69b6-e164-ebf977e60187/disk-0.vmdk for reading. {{(pid=68217) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1001.593878] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961742, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.690049] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fbfee041-f8d6-49a1-a03f-c0000fd8aa90 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.826283] env[68217]: DEBUG nova.scheduler.client.report [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1001.841141] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961743, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496665} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.841877] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 66ca9efd-1839-4e98-b006-5fc3adda375d/66ca9efd-1839-4e98-b006-5fc3adda375d.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1001.841877] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1001.842718] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-01310509-f69d-4b4a-af6d-f1a49c12231b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.850423] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1001.850423] env[68217]: value = "task-2961744" [ 1001.850423] env[68217]: _type = "Task" [ 1001.850423] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.861434] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961744, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.007235] env[68217]: DEBUG nova.compute.manager [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1002.037260] env[68217]: DEBUG nova.virt.hardware [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1002.037509] env[68217]: DEBUG nova.virt.hardware [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1002.039821] env[68217]: DEBUG nova.virt.hardware [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1002.039821] env[68217]: DEBUG nova.virt.hardware [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1002.039821] env[68217]: DEBUG nova.virt.hardware [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1002.039821] env[68217]: DEBUG nova.virt.hardware [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1002.039821] env[68217]: DEBUG nova.virt.hardware [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1002.039821] env[68217]: DEBUG nova.virt.hardware [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1002.039821] env[68217]: DEBUG nova.virt.hardware [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1002.039821] env[68217]: DEBUG nova.virt.hardware [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1002.039821] env[68217]: DEBUG nova.virt.hardware [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1002.040118] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c1443fa-32ec-40d3-905e-b44dad8d7c32 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.050408] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a90a63-709c-4076-8c24-5e7dc57afa46 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.092486] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961742, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.099295] env[68217]: INFO nova.compute.manager [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Rescuing [ 1002.099754] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "refresh_cache-b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.099959] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired lock "refresh_cache-b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.100213] env[68217]: DEBUG nova.network.neutron [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1002.282535] env[68217]: DEBUG nova.compute.manager [req-928b4298-3eec-41d0-bffc-0bb2dce09ec0 req-a99f9beb-6ef5-43a8-b5e6-2576d307433a service nova] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Received event network-vif-plugged-e30969ad-58d0-4513-8a14-98c33a9ff504 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1002.282864] env[68217]: DEBUG oslo_concurrency.lockutils [req-928b4298-3eec-41d0-bffc-0bb2dce09ec0 req-a99f9beb-6ef5-43a8-b5e6-2576d307433a service nova] Acquiring lock "d1fcac61-0d2a-4331-9042-af11c3c36ae4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1002.283177] env[68217]: DEBUG oslo_concurrency.lockutils [req-928b4298-3eec-41d0-bffc-0bb2dce09ec0 req-a99f9beb-6ef5-43a8-b5e6-2576d307433a service nova] Lock "d1fcac61-0d2a-4331-9042-af11c3c36ae4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.283395] env[68217]: DEBUG oslo_concurrency.lockutils [req-928b4298-3eec-41d0-bffc-0bb2dce09ec0 req-a99f9beb-6ef5-43a8-b5e6-2576d307433a service nova] Lock "d1fcac61-0d2a-4331-9042-af11c3c36ae4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.283612] env[68217]: DEBUG nova.compute.manager [req-928b4298-3eec-41d0-bffc-0bb2dce09ec0 req-a99f9beb-6ef5-43a8-b5e6-2576d307433a service nova] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] No waiting events found dispatching network-vif-plugged-e30969ad-58d0-4513-8a14-98c33a9ff504 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1002.283890] env[68217]: WARNING nova.compute.manager 
[req-928b4298-3eec-41d0-bffc-0bb2dce09ec0 req-a99f9beb-6ef5-43a8-b5e6-2576d307433a service nova] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Received unexpected event network-vif-plugged-e30969ad-58d0-4513-8a14-98c33a9ff504 for instance with vm_state building and task_state spawning. [ 1002.332979] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.344s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1002.333762] env[68217]: DEBUG nova.compute.manager [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1002.336620] env[68217]: DEBUG oslo_concurrency.lockutils [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.005s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1002.338153] env[68217]: INFO nova.compute.claims [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1002.362768] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961744, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.088366} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.363129] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1002.365359] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-800f660f-c44a-4203-aa71-98ce5ce231f3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.392512] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Reconfiguring VM instance instance-0000005e to attach disk [datastore2] 66ca9efd-1839-4e98-b006-5fc3adda375d/66ca9efd-1839-4e98-b006-5fc3adda375d.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1002.395212] env[68217]: DEBUG nova.network.neutron [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Successfully updated port: e30969ad-58d0-4513-8a14-98c33a9ff504 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1002.397825] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-baeef52d-ebe7-4a8a-afc5-f294d0504701 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.418160] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Acquiring lock "refresh_cache-d1fcac61-0d2a-4331-9042-af11c3c36ae4" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.418506] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Acquired lock "refresh_cache-d1fcac61-0d2a-4331-9042-af11c3c36ae4" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1002.418724] env[68217]: DEBUG nova.network.neutron [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1002.430724] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1002.430724] env[68217]: value = "task-2961745" [ 1002.430724] env[68217]: _type = "Task" [ 1002.430724] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.443109] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961745, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.593583] env[68217]: DEBUG oslo_vmware.api [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961742, 'name': PowerOnVM_Task, 'duration_secs': 1.183898} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.594392] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1002.743768] env[68217]: DEBUG nova.compute.manager [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1002.744803] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fc06712-3e76-408b-bd1d-ff02311468c0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.842909] env[68217]: DEBUG nova.compute.utils [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1002.847479] env[68217]: DEBUG nova.compute.manager [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1002.847753] env[68217]: DEBUG nova.network.neutron [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1002.945922] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961745, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.972952] env[68217]: DEBUG nova.policy [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a5f9c6fc51404b3491c5c3decd27aa7e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '905b15e740ad4f879ba61518ba400680', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1003.023350] env[68217]: DEBUG nova.network.neutron [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1003.100917] env[68217]: DEBUG nova.network.neutron [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Updating instance_info_cache with network_info: [{"id": "893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0", "address": "fa:16:3e:f9:bc:67", "network": {"id": "8ad06dc6-c950-487f-b4ea-fe9364f8548d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-897727038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1424003d74424a9e84d15879f2e634e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap893a1ea4-f2", "ovs_interfaceid": "893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.155746] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0633abb9-af67-4c1e-b7c0-2432d4cc63b3 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1003.156016] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0633abb9-af67-4c1e-b7c0-2432d4cc63b3 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" acquired by 
"nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1003.271160] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c86dd636-b0e1-474c-b834-8674001373fe tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "d14026b1-84dd-430e-be94-94dcb1f47473" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 38.523s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1003.296130] env[68217]: DEBUG nova.network.neutron [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Updating instance_info_cache with network_info: [{"id": "e30969ad-58d0-4513-8a14-98c33a9ff504", "address": "fa:16:3e:ff:ab:48", "network": {"id": "6a484d0a-2174-4434-a9cd-da097c02487e", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-948811419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1a02f6f1b5d4a13b59e08dd51a81137", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape30969ad-58", "ovs_interfaceid": "e30969ad-58d0-4513-8a14-98c33a9ff504", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.345508] env[68217]: DEBUG nova.compute.manager [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1003.443214] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961745, 'name': ReconfigVM_Task, 'duration_secs': 0.553547} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.443576] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Reconfigured VM instance instance-0000005e to attach disk [datastore2] 66ca9efd-1839-4e98-b006-5fc3adda375d/66ca9efd-1839-4e98-b006-5fc3adda375d.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1003.444230] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-45dbe682-2ef2-470b-a18a-7b7f93e98f71 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.449154] env[68217]: DEBUG nova.network.neutron [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Successfully created port: a9ab050e-fd0e-469a-8c0a-a9794739f06b {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1003.457509] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1003.457509] env[68217]: value = "task-2961746" [ 1003.457509] env[68217]: _type = "Task" [ 1003.457509] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.469898] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961746, 'name': Rename_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.605052] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Releasing lock "refresh_cache-b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.632661] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-651d21c3-7c4a-4e78-bb97-d5722b8f5777 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.642379] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4878f9f9-5b8d-4f6e-9850-cde4f85dd50d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.680170] env[68217]: INFO nova.compute.manager [None req-0633abb9-af67-4c1e-b7c0-2432d4cc63b3 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Detaching volume 8c9d92b9-9da4-4b57-8504-b765266d4fee [ 1003.685181] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca325a7b-9e02-4785-ad96-a70f49f41c6b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.701556] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f81ffaf7-fdf7-4c5a-ac2d-fb9b53e11fa4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.722063] env[68217]: DEBUG nova.compute.provider_tree [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1003.731841] env[68217]: INFO nova.virt.block_device [None req-0633abb9-af67-4c1e-b7c0-2432d4cc63b3 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Attempting to driver detach volume 8c9d92b9-9da4-4b57-8504-b765266d4fee from mountpoint /dev/sdb [ 1003.731917] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0633abb9-af67-4c1e-b7c0-2432d4cc63b3 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Volume detach. 
Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1003.732127] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0633abb9-af67-4c1e-b7c0-2432d4cc63b3 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594331', 'volume_id': '8c9d92b9-9da4-4b57-8504-b765266d4fee', 'name': 'volume-8c9d92b9-9da4-4b57-8504-b765266d4fee', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f', 'attached_at': '', 'detached_at': '', 'volume_id': '8c9d92b9-9da4-4b57-8504-b765266d4fee', 'serial': '8c9d92b9-9da4-4b57-8504-b765266d4fee'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1003.733053] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cbdd1c9-893e-4142-a65c-c38955f5f608 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.765119] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35cc5885-2738-47aa-bba5-90565b6f406f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.775431] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30f2a69c-8a4e-4c7e-8b15-9d02ca629566 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.800841] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Releasing lock "refresh_cache-d1fcac61-0d2a-4331-9042-af11c3c36ae4" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1003.801382] env[68217]: DEBUG nova.compute.manager [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Instance network_info: |[{"id": "e30969ad-58d0-4513-8a14-98c33a9ff504", "address": "fa:16:3e:ff:ab:48", "network": {"id": "6a484d0a-2174-4434-a9cd-da097c02487e", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-948811419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1a02f6f1b5d4a13b59e08dd51a81137", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape30969ad-58", "ovs_interfaceid": "e30969ad-58d0-4513-8a14-98c33a9ff504", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": 
"normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1003.802452] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ff:ab:48', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e30969ad-58d0-4513-8a14-98c33a9ff504', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1003.810222] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Creating folder: Project (e1a02f6f1b5d4a13b59e08dd51a81137). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1003.811025] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52cefbe5-16c8-4d88-9a34-8128a5ac8d8e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.814280] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7484396d-cc14-48a1-8d5d-8e0f5590b63e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.829712] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0633abb9-af67-4c1e-b7c0-2432d4cc63b3 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] The volume has not been displaced from its original location: [datastore2] volume-8c9d92b9-9da4-4b57-8504-b765266d4fee/volume-8c9d92b9-9da4-4b57-8504-b765266d4fee.vmdk. No consolidation needed. 
{{(pid=68217) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1003.836380] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0633abb9-af67-4c1e-b7c0-2432d4cc63b3 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Reconfiguring VM instance instance-00000030 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1003.838900] env[68217]: DEBUG nova.network.neutron [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Successfully created port: 79c6bcfe-54df-47ec-a39b-84e3bba24e55 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1003.842050] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6aa4cbba-842d-481a-9815-c2c4fdba0908 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.856196] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Created folder: Project (e1a02f6f1b5d4a13b59e08dd51a81137) in parent group-v594094. [ 1003.856495] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Creating folder: Instances. Parent ref: group-v594358. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1003.860218] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c4880bc3-2f0a-4f42-aa7c-ed2927bd4822 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.869693] env[68217]: DEBUG oslo_vmware.api [None req-0633abb9-af67-4c1e-b7c0-2432d4cc63b3 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1003.869693] env[68217]: value = "task-2961749" [ 1003.869693] env[68217]: _type = "Task" [ 1003.869693] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.874149] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Created folder: Instances in parent group-v594358. [ 1003.874410] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1003.874984] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1003.875232] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a25eb63e-c7d2-4aa7-8c01-04e0f4a7be13 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.894446] env[68217]: DEBUG oslo_vmware.api [None req-0633abb9-af67-4c1e-b7c0-2432d4cc63b3 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961749, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.901482] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1003.901482] env[68217]: value = "task-2961750" [ 1003.901482] env[68217]: _type = "Task" [ 1003.901482] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.912257] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961750, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.970341] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961746, 'name': Rename_Task, 'duration_secs': 0.217083} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.970640] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1003.971296] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-82399946-2b7c-4fb4-ad26-116e7434f28b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.979976] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1003.979976] env[68217]: value = "task-2961751" [ 1003.979976] env[68217]: _type = "Task" [ 1003.979976] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.990373] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961751, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.226392] env[68217]: DEBUG nova.scheduler.client.report [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1004.315542] env[68217]: DEBUG nova.compute.manager [req-e012563e-2411-4cb0-a650-122ca1a99b94 req-3c45e877-4daf-4efa-a3b3-8ba4bde376f7 service nova] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Received event network-changed-e30969ad-58d0-4513-8a14-98c33a9ff504 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1004.315807] env[68217]: DEBUG nova.compute.manager [req-e012563e-2411-4cb0-a650-122ca1a99b94 req-3c45e877-4daf-4efa-a3b3-8ba4bde376f7 service nova] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Refreshing instance network info cache due to event network-changed-e30969ad-58d0-4513-8a14-98c33a9ff504. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1004.317977] env[68217]: DEBUG oslo_concurrency.lockutils [req-e012563e-2411-4cb0-a650-122ca1a99b94 req-3c45e877-4daf-4efa-a3b3-8ba4bde376f7 service nova] Acquiring lock "refresh_cache-d1fcac61-0d2a-4331-9042-af11c3c36ae4" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.318313] env[68217]: DEBUG oslo_concurrency.lockutils [req-e012563e-2411-4cb0-a650-122ca1a99b94 req-3c45e877-4daf-4efa-a3b3-8ba4bde376f7 service nova] Acquired lock "refresh_cache-d1fcac61-0d2a-4331-9042-af11c3c36ae4" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.318557] env[68217]: DEBUG nova.network.neutron [req-e012563e-2411-4cb0-a650-122ca1a99b94 req-3c45e877-4daf-4efa-a3b3-8ba4bde376f7 service nova] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Refreshing network info cache for port e30969ad-58d0-4513-8a14-98c33a9ff504 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1004.364025] env[68217]: DEBUG nova.compute.manager [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1004.380235] env[68217]: DEBUG oslo_vmware.api [None req-0633abb9-af67-4c1e-b7c0-2432d4cc63b3 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961749, 'name': ReconfigVM_Task, 'duration_secs': 0.36279} completed successfully. 
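
[annotation] The inventory dict the scheduler report client logs above is what Placement uses to bound allocations: for each resource class the consumable capacity is (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining individual allocations. A small calculation using the figures from the log:

    # Figures copied from the provider inventory logged above.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        # Placement's capacity rule: (total - reserved) * allocation_ratio.
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: {capacity:.0f} consumable units")
    # -> VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
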
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.380566] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0633abb9-af67-4c1e-b7c0-2432d4cc63b3 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Reconfigured VM instance instance-00000030 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1004.385643] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c328cdef-660f-47b2-a90b-dafb03613e97 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.404876] env[68217]: DEBUG nova.virt.hardware [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1004.405129] env[68217]: DEBUG nova.virt.hardware [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1004.405286] env[68217]: DEBUG nova.virt.hardware [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1004.405465] env[68217]: DEBUG nova.virt.hardware [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1004.405640] env[68217]: DEBUG nova.virt.hardware [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1004.405762] env[68217]: DEBUG nova.virt.hardware [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1004.405966] env[68217]: DEBUG nova.virt.hardware [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 
tempest-ServersTestMultiNic-1769775128-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1004.406134] env[68217]: DEBUG nova.virt.hardware [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1004.406304] env[68217]: DEBUG nova.virt.hardware [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1004.406549] env[68217]: DEBUG nova.virt.hardware [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1004.406805] env[68217]: DEBUG nova.virt.hardware [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1004.407707] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22e5715a-c949-4c41-b114-0672a4853a9c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.414127] env[68217]: DEBUG oslo_vmware.api [None req-0633abb9-af67-4c1e-b7c0-2432d4cc63b3 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1004.414127] env[68217]: value = "task-2961752" [ 1004.414127] env[68217]: _type = "Task" [ 1004.414127] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.422718] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961750, 'name': CreateVM_Task, 'duration_secs': 0.45424} completed successfully. 
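
[annotation] The hardware.py lines above walk Nova's CPU-topology selection: with no flavor or image limits (0:0:0 everywhere), the only candidate for a 1-vCPU guest is sockets=1, cores=1, threads=1. A simplified stand-alone illustration of that enumeration (not Nova's actual implementation): candidate topologies are the factorizations of the vCPU count into sockets x cores x threads, capped by the maxima.

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) combinations that multiply to vcpus."""
        for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                               range(1, min(vcpus, max_cores) + 1),
                               range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                yield (s, c, t)

    print(list(possible_topologies(1)))   # [(1, 1, 1)], matching the log above
    print(list(possible_topologies(4)))   # e.g. (1, 1, 4), (1, 2, 2), (2, 2, 1), ...
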
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.423296] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1004.424575] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5711440-79a4-4328-a1d0-1a6bd6fa928f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.432083] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.432261] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.432603] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1004.432878] env[68217]: DEBUG oslo_vmware.api [None req-0633abb9-af67-4c1e-b7c0-2432d4cc63b3 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961752, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.433116] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45a16630-8c31-4233-9ac6-580b27f0f4dd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.439317] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Waiting for the task: (returnval){ [ 1004.439317] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52241c82-63e3-4151-2951-729d78fcf76a" [ 1004.439317] env[68217]: _type = "Task" [ 1004.439317] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.457888] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52241c82-63e3-4151-2951-729d78fcf76a, 'name': SearchDatastore_Task, 'duration_secs': 0.012977} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.458245] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1004.458590] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1004.458881] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1004.459065] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1004.459262] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1004.459548] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e4b91e8-5a97-4aef-9929-f9480b24a640 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.468874] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1004.469104] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Folder [datastore1] devstack-image-cache_base created. 
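
[annotation] The lock names and datastore paths above follow the VMware driver's image-cache layout: the Glance image is kept once per datastore under devstack-image-cache_base/<image-id>/<image-id>.vmdk and then copied to a per-instance path. A string-only sketch of how those datastore paths are composed, using the IDs from this log:

    image_id = "575ba628-84b6-4b0c-98ba-305166627d10"
    instance_uuid = "d1fcac61-0d2a-4331-9042-af11c3c36ae4"
    datastore = "datastore1"
    cache_folder = "devstack-image-cache_base"

    # Cached copy of the Glance image, shared by every instance on this datastore.
    cached_vmdk = f"[{datastore}] {cache_folder}/{image_id}/{image_id}.vmdk"
    # Per-instance root disk the cached image gets copied to before first boot.
    instance_vmdk = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

    print(cached_vmdk)
    print(instance_vmdk)
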
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1004.469915] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b5eb3771-9bfb-49dc-8378-215e0b2d8389 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.475293] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Waiting for the task: (returnval){ [ 1004.475293] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d86188-a4c8-c314-8263-5d8f6640b462" [ 1004.475293] env[68217]: _type = "Task" [ 1004.475293] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.483787] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d86188-a4c8-c314-8263-5d8f6640b462, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.492058] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961751, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.731864] env[68217]: DEBUG oslo_concurrency.lockutils [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1004.732520] env[68217]: DEBUG nova.compute.manager [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1004.735552] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 17.530s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1004.737231] env[68217]: INFO nova.compute.claims [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1004.926871] env[68217]: DEBUG oslo_vmware.api [None req-0633abb9-af67-4c1e-b7c0-2432d4cc63b3 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961752, 'name': ReconfigVM_Task, 'duration_secs': 0.172403} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.927216] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0633abb9-af67-4c1e-b7c0-2432d4cc63b3 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594331', 'volume_id': '8c9d92b9-9da4-4b57-8504-b765266d4fee', 'name': 'volume-8c9d92b9-9da4-4b57-8504-b765266d4fee', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f', 'attached_at': '', 'detached_at': '', 'volume_id': '8c9d92b9-9da4-4b57-8504-b765266d4fee', 'serial': '8c9d92b9-9da4-4b57-8504-b765266d4fee'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1004.986965] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d86188-a4c8-c314-8263-5d8f6640b462, 'name': SearchDatastore_Task, 'duration_secs': 0.020561} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.993084] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3a186cbd-69bf-4b82-917f-7b132249b928 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.001507] env[68217]: DEBUG oslo_vmware.api [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961751, 'name': PowerOnVM_Task, 'duration_secs': 0.68389} completed successfully. 
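
[annotation] The "Detached VMDK" line above dumps the Cinder connection_info the detach path worked from. A tiny sketch of pulling the interesting fields out of such a dict (the keys are copied from the log entry; only a subset is shown and the variable names are mine):

    connection_info = {
        "driver_volume_type": "vmdk",
        "data": {
            "volume": "vm-594331",
            "volume_id": "8c9d92b9-9da4-4b57-8504-b765266d4fee",
            "name": "volume-8c9d92b9-9da4-4b57-8504-b765266d4fee",
            "access_mode": "rw",
            "encrypted": False,
        },
    }

    data = connection_info["data"]
    # "volume" is the shadow VM backing the Cinder volume on the vCenter side,
    # "volume_id" is the Cinder UUID used for the detach bookkeeping.
    print(data["volume"], data["volume_id"], data["access_mode"])
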
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.002853] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1005.003084] env[68217]: INFO nova.compute.manager [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Took 8.37 seconds to spawn the instance on the hypervisor. [ 1005.003272] env[68217]: DEBUG nova.compute.manager [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1005.003600] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Waiting for the task: (returnval){ [ 1005.003600] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]528431be-6093-b409-2bac-b30094f6030d" [ 1005.003600] env[68217]: _type = "Task" [ 1005.003600] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.004453] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0178dc-a846-473a-aed8-700cdc60c041 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.019544] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528431be-6093-b409-2bac-b30094f6030d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.109113] env[68217]: DEBUG nova.network.neutron [req-e012563e-2411-4cb0-a650-122ca1a99b94 req-3c45e877-4daf-4efa-a3b3-8ba4bde376f7 service nova] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Updated VIF entry in instance network info cache for port e30969ad-58d0-4513-8a14-98c33a9ff504. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1005.109502] env[68217]: DEBUG nova.network.neutron [req-e012563e-2411-4cb0-a650-122ca1a99b94 req-3c45e877-4daf-4efa-a3b3-8ba4bde376f7 service nova] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Updating instance_info_cache with network_info: [{"id": "e30969ad-58d0-4513-8a14-98c33a9ff504", "address": "fa:16:3e:ff:ab:48", "network": {"id": "6a484d0a-2174-4434-a9cd-da097c02487e", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-948811419-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e1a02f6f1b5d4a13b59e08dd51a81137", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8ea16b8c-ebf6-4bd4-ab67-ddb472e7b460", "external-id": "nsx-vlan-transportzone-156", "segmentation_id": 156, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape30969ad-58", "ovs_interfaceid": "e30969ad-58d0-4513-8a14-98c33a9ff504", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.139534] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1005.139820] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-bd34f42f-a94f-4b7b-9e6f-d67d8df446e0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.149573] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1005.149573] env[68217]: value = "task-2961753" [ 1005.149573] env[68217]: _type = "Task" [ 1005.149573] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.161184] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961753, 'name': PowerOffVM_Task} progress is 0%. 
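
[annotation] The instance_info_cache update above stores the Neutron view of the port as a list of VIF dicts. A short sketch that pulls the MAC, VIF type and fixed IPs back out of such an entry (structure trimmed down from the cache blob in this log):

    network_info = [{
        "id": "e30969ad-58d0-4513-8a14-98c33a9ff504",
        "address": "fa:16:3e:ff:ab:48",
        "type": "ovs",
        "ovs_interfaceid": "e30969ad-58d0-4513-8a14-98c33a9ff504",
        "network": {
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.6", "type": "fixed"}],
            }],
        },
    }]

    for vif in network_info:
        ips = [ip["address"]
               for subnet in vif["network"]["subnets"]
               for ip in subnet["ips"]]
        print(vif["address"], vif["type"], ips)   # MAC, VIF type, fixed IPs
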
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.241747] env[68217]: DEBUG nova.compute.utils [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1005.245260] env[68217]: DEBUG nova.compute.manager [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1005.245426] env[68217]: DEBUG nova.network.neutron [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1005.327122] env[68217]: DEBUG nova.policy [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0939a9bd52d142818e49fbf0c576e4a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd093c295105c44cca8bd67bd514429d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1005.469490] env[68217]: DEBUG nova.objects.instance [None req-0633abb9-af67-4c1e-b7c0-2432d4cc63b3 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lazy-loading 'flavor' on Instance uuid 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1005.517480] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528431be-6093-b409-2bac-b30094f6030d, 'name': SearchDatastore_Task, 'duration_secs': 0.021506} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.517686] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.517953] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] d1fcac61-0d2a-4331-9042-af11c3c36ae4/d1fcac61-0d2a-4331-9042-af11c3c36ae4.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1005.518287] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3e721ff-bf91-440a-824e-4867785ab2dc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.528712] env[68217]: INFO nova.compute.manager [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Took 28.66 seconds to build instance. [ 1005.533325] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Waiting for the task: (returnval){ [ 1005.533325] env[68217]: value = "task-2961754" [ 1005.533325] env[68217]: _type = "Task" [ 1005.533325] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.540241] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': task-2961754, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.612516] env[68217]: DEBUG oslo_concurrency.lockutils [req-e012563e-2411-4cb0-a650-122ca1a99b94 req-3c45e877-4daf-4efa-a3b3-8ba4bde376f7 service nova] Releasing lock "refresh_cache-d1fcac61-0d2a-4331-9042-af11c3c36ae4" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1005.673316] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961753, 'name': PowerOffVM_Task, 'duration_secs': 0.198898} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.673695] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1005.676943] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc55019d-0c17-4300-aefc-f361c3c0ee0c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.709269] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd032369-b637-4155-9463-140cf4fb17d2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.745022] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1005.745417] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d18e5a8-78c8-45ed-a735-88401fc19758 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.748329] env[68217]: DEBUG nova.compute.manager [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1005.766164] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1005.766164] env[68217]: value = "task-2961755" [ 1005.766164] env[68217]: _type = "Task" [ 1005.766164] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.775728] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] VM already powered off {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1005.776273] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1005.776675] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1005.776985] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1005.779022] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1005.779022] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9a1b6047-e5cd-4a77-bfad-8916259c415a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.792021] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1005.792021] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1005.792021] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-393c1497-9c46-4f4e-9a81-09962fe2904f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.804033] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1005.804033] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5279b626-b76b-27c0-d69a-31b4ad2963f1" [ 1005.804033] env[68217]: _type = "Task" [ 1005.804033] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.811181] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5279b626-b76b-27c0-d69a-31b4ad2963f1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1005.901517] env[68217]: DEBUG nova.network.neutron [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Successfully updated port: a9ab050e-fd0e-469a-8c0a-a9794739f06b {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1005.937634] env[68217]: DEBUG nova.network.neutron [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Successfully created port: b5d29b01-b9b6-4d25-9fef-4a335cf05875 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1006.032579] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a771a937-b824-4a91-a2a1-fcba74b475cb tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "66ca9efd-1839-4e98-b006-5fc3adda375d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.168s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.048360] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': task-2961754, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.098736] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24bb1609-6120-498e-8916-956192250a31 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.109047] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee2aab26-c992-44ae-aaaa-7b3ff7a82eb5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.149959] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2968818f-5a39-4208-81c5-a12fca5a31d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.160175] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2bcf146-32dc-4ad4-ac82-251e6d1c5875 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.178470] env[68217]: DEBUG nova.compute.provider_tree [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1006.312878] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5279b626-b76b-27c0-d69a-31b4ad2963f1, 'name': SearchDatastore_Task, 'duration_secs': 0.033851} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.313948] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bea98cf-28db-4927-9971-32dbca12e184 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.320428] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1006.320428] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ba1ea6-31f7-13be-c35e-c14a5024f788" [ 1006.320428] env[68217]: _type = "Task" [ 1006.320428] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.330667] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ba1ea6-31f7-13be-c35e-c14a5024f788, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.421208] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2068c22f-9022-4af8-a428-d4df91a74f13 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.442574] env[68217]: DEBUG nova.compute.manager [req-d6e554d1-39f5-436a-bd11-8f44d305faca req-36100e8d-7f4b-4e7a-a132-2f91326fac04 service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Received event network-vif-plugged-a9ab050e-fd0e-469a-8c0a-a9794739f06b {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1006.442794] env[68217]: DEBUG oslo_concurrency.lockutils [req-d6e554d1-39f5-436a-bd11-8f44d305faca req-36100e8d-7f4b-4e7a-a132-2f91326fac04 service nova] Acquiring lock "34f176e7-f98e-4eda-aee9-45e44d5ffb85-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.442991] env[68217]: DEBUG oslo_concurrency.lockutils [req-d6e554d1-39f5-436a-bd11-8f44d305faca req-36100e8d-7f4b-4e7a-a132-2f91326fac04 service nova] Lock "34f176e7-f98e-4eda-aee9-45e44d5ffb85-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.443174] env[68217]: DEBUG oslo_concurrency.lockutils [req-d6e554d1-39f5-436a-bd11-8f44d305faca req-36100e8d-7f4b-4e7a-a132-2f91326fac04 service nova] Lock "34f176e7-f98e-4eda-aee9-45e44d5ffb85-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.443336] env[68217]: DEBUG nova.compute.manager [req-d6e554d1-39f5-436a-bd11-8f44d305faca req-36100e8d-7f4b-4e7a-a132-2f91326fac04 service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] No waiting events found dispatching network-vif-plugged-a9ab050e-fd0e-469a-8c0a-a9794739f06b {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1006.443497] env[68217]: WARNING nova.compute.manager [req-d6e554d1-39f5-436a-bd11-8f44d305faca req-36100e8d-7f4b-4e7a-a132-2f91326fac04 service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Received unexpected event network-vif-plugged-a9ab050e-fd0e-469a-8c0a-a9794739f06b for instance with vm_state building and task_state spawning. [ 1006.443656] env[68217]: DEBUG nova.compute.manager [req-d6e554d1-39f5-436a-bd11-8f44d305faca req-36100e8d-7f4b-4e7a-a132-2f91326fac04 service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Received event network-changed-a9ab050e-fd0e-469a-8c0a-a9794739f06b {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1006.443835] env[68217]: DEBUG nova.compute.manager [req-d6e554d1-39f5-436a-bd11-8f44d305faca req-36100e8d-7f4b-4e7a-a132-2f91326fac04 service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Refreshing instance network info cache due to event network-changed-a9ab050e-fd0e-469a-8c0a-a9794739f06b. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1006.444235] env[68217]: DEBUG oslo_concurrency.lockutils [req-d6e554d1-39f5-436a-bd11-8f44d305faca req-36100e8d-7f4b-4e7a-a132-2f91326fac04 service nova] Acquiring lock "refresh_cache-34f176e7-f98e-4eda-aee9-45e44d5ffb85" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1006.444235] env[68217]: DEBUG oslo_concurrency.lockutils [req-d6e554d1-39f5-436a-bd11-8f44d305faca req-36100e8d-7f4b-4e7a-a132-2f91326fac04 service nova] Acquired lock "refresh_cache-34f176e7-f98e-4eda-aee9-45e44d5ffb85" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1006.444334] env[68217]: DEBUG nova.network.neutron [req-d6e554d1-39f5-436a-bd11-8f44d305faca req-36100e8d-7f4b-4e7a-a132-2f91326fac04 service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Refreshing network info cache for port a9ab050e-fd0e-469a-8c0a-a9794739f06b {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1006.457275] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "66ca9efd-1839-4e98-b006-5fc3adda375d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.457507] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "66ca9efd-1839-4e98-b006-5fc3adda375d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.457734] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "66ca9efd-1839-4e98-b006-5fc3adda375d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1006.457940] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "66ca9efd-1839-4e98-b006-5fc3adda375d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.458122] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "66ca9efd-1839-4e98-b006-5fc3adda375d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.459936] env[68217]: INFO nova.compute.manager [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 
66ca9efd-1839-4e98-b006-5fc3adda375d] Terminating instance [ 1006.482973] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0633abb9-af67-4c1e-b7c0-2432d4cc63b3 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.327s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1006.484524] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2068c22f-9022-4af8-a428-d4df91a74f13 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.064s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1006.484712] env[68217]: DEBUG nova.compute.manager [None req-2068c22f-9022-4af8-a428-d4df91a74f13 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1006.488671] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83cf61e8-f642-434d-b33d-97593c66ac67 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.498344] env[68217]: DEBUG nova.compute.manager [None req-2068c22f-9022-4af8-a428-d4df91a74f13 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68217) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1006.499548] env[68217]: DEBUG nova.objects.instance [None req-2068c22f-9022-4af8-a428-d4df91a74f13 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lazy-loading 'flavor' on Instance uuid 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1006.543539] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': task-2961754, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.809104} completed successfully. 
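
[annotation] "current DB power_state: 1, current VM power_state: 1" above uses Nova's numeric power-state constants (1 is RUNNING). A hedged sketch of the kind of mapping the VMware driver performs from vSphere's runtime.powerState strings to those constants; the dict below is my illustration, not a copy of Nova's table:

    from nova.compute import power_state

    # Illustrative mapping from vSphere runtime.powerState to Nova power states.
    VSPHERE_TO_NOVA = {
        "poweredOn":  power_state.RUNNING,    # == 1, as seen in the log line above
        "poweredOff": power_state.SHUTDOWN,
        "suspended":  power_state.SUSPENDED,
    }

    print(VSPHERE_TO_NOVA["poweredOn"])   # 1
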
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.543792] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] d1fcac61-0d2a-4331-9042-af11c3c36ae4/d1fcac61-0d2a-4331-9042-af11c3c36ae4.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1006.544316] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1006.544316] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-973f0dcb-9d2a-470a-9c6a-f43e7ae5fa1a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.551233] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Waiting for the task: (returnval){ [ 1006.551233] env[68217]: value = "task-2961756" [ 1006.551233] env[68217]: _type = "Task" [ 1006.551233] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.561442] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': task-2961756, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.682633] env[68217]: DEBUG nova.scheduler.client.report [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1006.764728] env[68217]: DEBUG nova.compute.manager [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Start spawning the instance on the hypervisor. 
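
[annotation] "Extending root virtual disk to 1048576" is the m1.nano flavor's root_gb=1 expressed in KiB, the unit the ExtendVirtualDisk_Task call works in. The conversion is just:

    root_gb = 1                          # m1.nano flavor from the log above
    capacity_kb = root_gb * 1024 * 1024  # GiB -> KiB
    assert capacity_kb == 1048576        # matches "Extending root virtual disk to 1048576"
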
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1006.791515] env[68217]: DEBUG nova.virt.hardware [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1006.791752] env[68217]: DEBUG nova.virt.hardware [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1006.791907] env[68217]: DEBUG nova.virt.hardware [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1006.792102] env[68217]: DEBUG nova.virt.hardware [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1006.792251] env[68217]: DEBUG nova.virt.hardware [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1006.792398] env[68217]: DEBUG nova.virt.hardware [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1006.793372] env[68217]: DEBUG nova.virt.hardware [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1006.793587] env[68217]: DEBUG nova.virt.hardware [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1006.793893] env[68217]: DEBUG 
nova.virt.hardware [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1006.794078] env[68217]: DEBUG nova.virt.hardware [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1006.794255] env[68217]: DEBUG nova.virt.hardware [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1006.795178] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d8ba36-bc50-4cc1-8c02-5f496efab650 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.803546] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c1a081-c408-4308-9179-83b462068659 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.829424] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ba1ea6-31f7-13be-c35e-c14a5024f788, 'name': SearchDatastore_Task, 'duration_secs': 0.020738} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.829668] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1006.829926] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk. 
{{(pid=68217) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1006.830194] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-00d8fd34-7e5b-4e37-a49b-53d15cda3ee1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.837303] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1006.837303] env[68217]: value = "task-2961757" [ 1006.837303] env[68217]: _type = "Task" [ 1006.837303] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.845081] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961757, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.963732] env[68217]: DEBUG nova.compute.manager [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1006.964014] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1006.964909] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-550f4ba0-a083-41f8-92a1-989db348cf9f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.973503] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1006.973799] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9af846a8-00fd-4db0-a25b-fb793804c55a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.981174] env[68217]: DEBUG oslo_vmware.api [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1006.981174] env[68217]: value = "task-2961758" [ 1006.981174] env[68217]: _type = "Task" [ 1006.981174] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.982391] env[68217]: DEBUG nova.network.neutron [req-d6e554d1-39f5-436a-bd11-8f44d305faca req-36100e8d-7f4b-4e7a-a132-2f91326fac04 service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1006.994184] env[68217]: DEBUG oslo_vmware.api [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961758, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.061264] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': task-2961756, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.165059} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.061544] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1007.062348] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a62476-ccac-469e-9626-12a444ab3ad4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.085664] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] d1fcac61-0d2a-4331-9042-af11c3c36ae4/d1fcac61-0d2a-4331-9042-af11c3c36ae4.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1007.086848] env[68217]: DEBUG nova.network.neutron [req-d6e554d1-39f5-436a-bd11-8f44d305faca req-36100e8d-7f4b-4e7a-a132-2f91326fac04 service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.088363] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-294b7456-2ef2-4a34-ae05-022bc0822f48 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.111132] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Waiting for the task: (returnval){ [ 1007.111132] env[68217]: value = "task-2961759" [ 1007.111132] env[68217]: _type = "Task" [ 1007.111132] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.120474] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': task-2961759, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.188121] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.452s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1007.189031] env[68217]: DEBUG nova.compute.manager [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1007.191681] env[68217]: DEBUG oslo_concurrency.lockutils [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 9.264s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1007.193743] env[68217]: DEBUG nova.objects.instance [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lazy-loading 'resources' on Instance uuid 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.348362] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961757, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.495027] env[68217]: DEBUG oslo_vmware.api [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961758, 'name': PowerOffVM_Task, 'duration_secs': 0.35146} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.495691] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1007.495873] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1007.496167] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4dd00a74-96ff-4bf7-a88d-9a588a8524ee {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.508108] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2068c22f-9022-4af8-a428-d4df91a74f13 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1007.508496] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-69bef4ff-2611-4314-a3e6-4ab417224217 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.521122] env[68217]: DEBUG oslo_vmware.api [None req-2068c22f-9022-4af8-a428-d4df91a74f13 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1007.521122] env[68217]: value = "task-2961761" [ 1007.521122] env[68217]: _type = "Task" [ 1007.521122] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.532522] env[68217]: DEBUG oslo_vmware.api [None req-2068c22f-9022-4af8-a428-d4df91a74f13 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961761, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.577988] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1007.578299] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1007.578569] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleting the datastore file [datastore2] 66ca9efd-1839-4e98-b006-5fc3adda375d {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1007.578919] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-19e8a689-3ca3-45d0-8bb6-808714e9ef2d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.587532] env[68217]: DEBUG oslo_vmware.api [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1007.587532] env[68217]: value = "task-2961762" [ 1007.587532] env[68217]: _type = "Task" [ 1007.587532] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.598426] env[68217]: DEBUG oslo_vmware.api [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961762, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.604248] env[68217]: DEBUG oslo_concurrency.lockutils [req-d6e554d1-39f5-436a-bd11-8f44d305faca req-36100e8d-7f4b-4e7a-a132-2f91326fac04 service nova] Releasing lock "refresh_cache-34f176e7-f98e-4eda-aee9-45e44d5ffb85" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1007.622716] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': task-2961759, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.695707] env[68217]: DEBUG nova.compute.utils [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1007.701025] env[68217]: DEBUG nova.compute.manager [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1007.701025] env[68217]: DEBUG nova.network.neutron [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1007.797054] env[68217]: DEBUG nova.policy [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '36e015ba5f004acea6418ed615a6ba70', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cf28bebd84964101a508b11ddc924552', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1007.848659] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961757, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.723358} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.848944] env[68217]: INFO nova.virt.vmwareapi.ds_util [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk. 
[ 1007.849804] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e3eea7b-02e0-44c6-af2f-47bbe8a30583 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.883997] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Reconfiguring VM instance instance-0000005d to attach disk [datastore2] b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1007.887041] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c99ec96e-1ade-470f-a5ed-3ee5e7970d63 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.907008] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1007.907008] env[68217]: value = "task-2961763" [ 1007.907008] env[68217]: _type = "Task" [ 1007.907008] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.921271] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961763, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.015122] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-691c4b1d-1397-453d-8b7a-d7ab15bbd539 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.026649] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-877017fc-23d5-4348-86d1-4132672ca65a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.034746] env[68217]: DEBUG oslo_vmware.api [None req-2068c22f-9022-4af8-a428-d4df91a74f13 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961761, 'name': PowerOffVM_Task, 'duration_secs': 0.191847} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.061373] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2068c22f-9022-4af8-a428-d4df91a74f13 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1008.062150] env[68217]: DEBUG nova.compute.manager [None req-2068c22f-9022-4af8-a428-d4df91a74f13 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1008.062734] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f722cdaa-3f4d-4c1d-8656-6f040a695133 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.065969] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3015ed8-9df8-4f0d-b2a0-165018d54d78 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.079022] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e07ee76-6ed5-42d4-9217-7bca0202abca {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.094219] env[68217]: DEBUG nova.compute.provider_tree [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1008.104628] env[68217]: DEBUG oslo_vmware.api [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961762, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.250413} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.105885] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1008.106145] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1008.106647] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1008.106647] env[68217]: INFO nova.compute.manager [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1008.106756] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1008.107349] env[68217]: DEBUG nova.compute.manager [-] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1008.107349] env[68217]: DEBUG nova.network.neutron [-] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1008.121346] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': task-2961759, 'name': ReconfigVM_Task, 'duration_secs': 0.603045} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.121684] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Reconfigured VM instance instance-0000005f to attach disk [datastore1] d1fcac61-0d2a-4331-9042-af11c3c36ae4/d1fcac61-0d2a-4331-9042-af11c3c36ae4.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1008.122322] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3ac3a6a4-9efb-4698-9ef8-08c9965b94d1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.129067] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Waiting for the task: (returnval){ [ 1008.129067] env[68217]: value = "task-2961764" [ 1008.129067] env[68217]: _type = "Task" [ 1008.129067] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.138256] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': task-2961764, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.146237] env[68217]: DEBUG nova.compute.manager [req-dca0be0c-88ca-48bb-9e81-050fe0efd26a req-ce6744a8-46b8-44e6-9bc2-ab04b07c6d6b service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Received event network-vif-plugged-79c6bcfe-54df-47ec-a39b-84e3bba24e55 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1008.146440] env[68217]: DEBUG oslo_concurrency.lockutils [req-dca0be0c-88ca-48bb-9e81-050fe0efd26a req-ce6744a8-46b8-44e6-9bc2-ab04b07c6d6b service nova] Acquiring lock "34f176e7-f98e-4eda-aee9-45e44d5ffb85-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.146707] env[68217]: DEBUG oslo_concurrency.lockutils [req-dca0be0c-88ca-48bb-9e81-050fe0efd26a req-ce6744a8-46b8-44e6-9bc2-ab04b07c6d6b service nova] Lock "34f176e7-f98e-4eda-aee9-45e44d5ffb85-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.146874] env[68217]: DEBUG oslo_concurrency.lockutils [req-dca0be0c-88ca-48bb-9e81-050fe0efd26a req-ce6744a8-46b8-44e6-9bc2-ab04b07c6d6b service nova] Lock "34f176e7-f98e-4eda-aee9-45e44d5ffb85-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.147046] env[68217]: DEBUG nova.compute.manager [req-dca0be0c-88ca-48bb-9e81-050fe0efd26a req-ce6744a8-46b8-44e6-9bc2-ab04b07c6d6b service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] No waiting events found dispatching 
network-vif-plugged-79c6bcfe-54df-47ec-a39b-84e3bba24e55 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1008.147219] env[68217]: WARNING nova.compute.manager [req-dca0be0c-88ca-48bb-9e81-050fe0efd26a req-ce6744a8-46b8-44e6-9bc2-ab04b07c6d6b service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Received unexpected event network-vif-plugged-79c6bcfe-54df-47ec-a39b-84e3bba24e55 for instance with vm_state building and task_state spawning. [ 1008.203771] env[68217]: DEBUG nova.compute.manager [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1008.421912] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961763, 'name': ReconfigVM_Task, 'duration_secs': 0.369533} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.422401] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Reconfigured VM instance instance-0000005d to attach disk [datastore2] b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1008.426290] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d38c9f7-74c0-401b-a40c-689e0ee91914 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.431951] env[68217]: DEBUG nova.network.neutron [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Successfully updated port: 79c6bcfe-54df-47ec-a39b-84e3bba24e55 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1008.457449] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquiring lock "refresh_cache-34f176e7-f98e-4eda-aee9-45e44d5ffb85" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.458356] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquired lock "refresh_cache-34f176e7-f98e-4eda-aee9-45e44d5ffb85" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1008.458642] env[68217]: DEBUG nova.network.neutron [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Building network info cache for instance {{(pid=68217) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1008.462172] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a30c464-23d9-49dc-9f4b-19b83c8ab832 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.479149] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1008.479149] env[68217]: value = "task-2961765" [ 1008.479149] env[68217]: _type = "Task" [ 1008.479149] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.489601] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961765, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.525377] env[68217]: DEBUG nova.compute.manager [req-c9d4fbaa-bf6a-4a7c-9a46-6d00c20d8fb9 req-2078ddd3-fdee-43a3-946c-3150253e0301 service nova] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Received event network-vif-deleted-0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1008.525377] env[68217]: INFO nova.compute.manager [req-c9d4fbaa-bf6a-4a7c-9a46-6d00c20d8fb9 req-2078ddd3-fdee-43a3-946c-3150253e0301 service nova] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Neutron deleted interface 0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3; detaching it from the instance and deleting it from the info cache [ 1008.525377] env[68217]: DEBUG nova.network.neutron [req-c9d4fbaa-bf6a-4a7c-9a46-6d00c20d8fb9 req-2078ddd3-fdee-43a3-946c-3150253e0301 service nova] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.548792] env[68217]: DEBUG nova.compute.manager [req-cd856425-025f-443c-ae70-5b4b1c652074 req-44f71472-2dc4-46de-ac7d-5117fd3a140f service nova] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Received event network-vif-plugged-b5d29b01-b9b6-4d25-9fef-4a335cf05875 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1008.549319] env[68217]: DEBUG oslo_concurrency.lockutils [req-cd856425-025f-443c-ae70-5b4b1c652074 req-44f71472-2dc4-46de-ac7d-5117fd3a140f service nova] Acquiring lock "df4c3a34-2dea-4f82-9ea6-7a9eb1c03179-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1008.549783] env[68217]: DEBUG oslo_concurrency.lockutils [req-cd856425-025f-443c-ae70-5b4b1c652074 req-44f71472-2dc4-46de-ac7d-5117fd3a140f service nova] Lock "df4c3a34-2dea-4f82-9ea6-7a9eb1c03179-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1008.552021] env[68217]: DEBUG oslo_concurrency.lockutils [req-cd856425-025f-443c-ae70-5b4b1c652074 req-44f71472-2dc4-46de-ac7d-5117fd3a140f service nova] Lock 
"df4c3a34-2dea-4f82-9ea6-7a9eb1c03179-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.552021] env[68217]: DEBUG nova.compute.manager [req-cd856425-025f-443c-ae70-5b4b1c652074 req-44f71472-2dc4-46de-ac7d-5117fd3a140f service nova] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] No waiting events found dispatching network-vif-plugged-b5d29b01-b9b6-4d25-9fef-4a335cf05875 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1008.552021] env[68217]: WARNING nova.compute.manager [req-cd856425-025f-443c-ae70-5b4b1c652074 req-44f71472-2dc4-46de-ac7d-5117fd3a140f service nova] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Received unexpected event network-vif-plugged-b5d29b01-b9b6-4d25-9fef-4a335cf05875 for instance with vm_state building and task_state spawning. [ 1008.552021] env[68217]: DEBUG nova.compute.manager [req-cd856425-025f-443c-ae70-5b4b1c652074 req-44f71472-2dc4-46de-ac7d-5117fd3a140f service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Received event network-changed-79c6bcfe-54df-47ec-a39b-84e3bba24e55 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1008.552021] env[68217]: DEBUG nova.compute.manager [req-cd856425-025f-443c-ae70-5b4b1c652074 req-44f71472-2dc4-46de-ac7d-5117fd3a140f service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Refreshing instance network info cache due to event network-changed-79c6bcfe-54df-47ec-a39b-84e3bba24e55. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1008.552021] env[68217]: DEBUG oslo_concurrency.lockutils [req-cd856425-025f-443c-ae70-5b4b1c652074 req-44f71472-2dc4-46de-ac7d-5117fd3a140f service nova] Acquiring lock "refresh_cache-34f176e7-f98e-4eda-aee9-45e44d5ffb85" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1008.554808] env[68217]: DEBUG nova.network.neutron [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1008.580416] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2068c22f-9022-4af8-a428-d4df91a74f13 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.096s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1008.603655] env[68217]: DEBUG nova.scheduler.client.report [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1008.615298] env[68217]: DEBUG nova.network.neutron [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Successfully updated port: b5d29b01-b9b6-4d25-9fef-4a335cf05875 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1008.639359] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': task-2961764, 'name': Rename_Task, 'duration_secs': 0.16742} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.639712] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1008.640020] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-169cdf7e-b3d1-4562-b7db-b9e70fffb36a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.647691] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Waiting for the task: (returnval){ [ 1008.647691] env[68217]: value = "task-2961766" [ 1008.647691] env[68217]: _type = "Task" [ 1008.647691] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.656913] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': task-2961766, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.892721] env[68217]: DEBUG nova.network.neutron [-] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.990372] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961765, 'name': ReconfigVM_Task, 'duration_secs': 0.246599} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.990655] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1008.990944] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bbfaec84-305d-4618-9073-eb3ae16ef31c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.997677] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1008.997677] env[68217]: value = "task-2961767" [ 1008.997677] env[68217]: _type = "Task" [ 1008.997677] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.001360] env[68217]: DEBUG nova.network.neutron [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Successfully created port: c02a5df2-09c6-499e-8c8a-5e198ba3ef9e {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1009.008870] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961767, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.027981] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c5897401-7eb4-4d25-bb2d-2ea196ebce34 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.038123] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-098078d1-89b8-485a-af0c-0c236672334e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.076423] env[68217]: DEBUG nova.compute.manager [req-c9d4fbaa-bf6a-4a7c-9a46-6d00c20d8fb9 req-2078ddd3-fdee-43a3-946c-3150253e0301 service nova] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Detach interface failed, port_id=0ed5e62a-cfdf-43c1-a32b-b3e9e14125a3, reason: Instance 66ca9efd-1839-4e98-b006-5fc3adda375d could not be found. 
{{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1009.098406] env[68217]: DEBUG nova.objects.instance [None req-694e6cf2-73e1-423b-8371-6e54c1ec428f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lazy-loading 'flavor' on Instance uuid 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1009.118023] env[68217]: DEBUG oslo_concurrency.lockutils [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.921s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.118023] env[68217]: DEBUG oslo_concurrency.lockutils [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.900s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1009.118023] env[68217]: INFO nova.compute.claims [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1009.119743] env[68217]: DEBUG oslo_concurrency.lockutils [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "refresh_cache-df4c3a34-2dea-4f82-9ea6-7a9eb1c03179" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.119907] env[68217]: DEBUG oslo_concurrency.lockutils [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "refresh_cache-df4c3a34-2dea-4f82-9ea6-7a9eb1c03179" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1009.120039] env[68217]: DEBUG nova.network.neutron [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1009.155557] env[68217]: INFO nova.scheduler.client.report [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Deleted allocations for instance 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc [ 1009.166337] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': task-2961766, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.213650] env[68217]: DEBUG nova.compute.manager [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1009.242017] env[68217]: DEBUG nova.virt.hardware [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1009.242259] env[68217]: DEBUG nova.virt.hardware [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1009.242416] env[68217]: DEBUG nova.virt.hardware [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1009.242595] env[68217]: DEBUG nova.virt.hardware [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1009.242738] env[68217]: DEBUG nova.virt.hardware [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1009.242883] env[68217]: DEBUG nova.virt.hardware [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1009.243101] env[68217]: DEBUG nova.virt.hardware [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1009.243263] env[68217]: DEBUG nova.virt.hardware [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1009.243427] env[68217]: DEBUG nova.virt.hardware [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1009.243589] env[68217]: DEBUG nova.virt.hardware [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1009.243765] env[68217]: DEBUG nova.virt.hardware [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1009.244676] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4000f40c-ee2a-43f5-981c-0bc08ae5788a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.252517] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281c2a2a-c7f1-4a87-9647-d03948e5df6c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.257500] env[68217]: DEBUG nova.network.neutron [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Updating instance_info_cache with network_info: [{"id": "a9ab050e-fd0e-469a-8c0a-a9794739f06b", "address": "fa:16:3e:30:73:93", "network": {"id": "42878174-e5ca-4794-b014-f0564b6f9268", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1527613629", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "905b15e740ad4f879ba61518ba400680", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bed837fa-6b6a-4192-a229-a99426a46065", "external-id": "nsx-vlan-transportzone-954", "segmentation_id": 954, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9ab050e-fd", "ovs_interfaceid": "a9ab050e-fd0e-469a-8c0a-a9794739f06b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}, {"id": "79c6bcfe-54df-47ec-a39b-84e3bba24e55", "address": "fa:16:3e:4a:e2:54", "network": {"id": "0149085c-6f20-47db-87ac-aa2336e92240", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1636213118", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "905b15e740ad4f879ba61518ba400680", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6365036-aa37-44d2-90d1-ca1c3516ded9", "external-id": "nsx-vlan-transportzone-66", "segmentation_id": 66, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79c6bcfe-54", "ovs_interfaceid": "79c6bcfe-54df-47ec-a39b-84e3bba24e55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1009.395695] env[68217]: INFO nova.compute.manager [-] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Took 1.29 seconds to deallocate network for instance. [ 1009.509234] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961767, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.603959] env[68217]: DEBUG oslo_concurrency.lockutils [None req-694e6cf2-73e1-423b-8371-6e54c1ec428f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "refresh_cache-95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.604204] env[68217]: DEBUG oslo_concurrency.lockutils [None req-694e6cf2-73e1-423b-8371-6e54c1ec428f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquired lock "refresh_cache-95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1009.604389] env[68217]: DEBUG nova.network.neutron [None req-694e6cf2-73e1-423b-8371-6e54c1ec428f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1009.604586] env[68217]: DEBUG nova.objects.instance [None req-694e6cf2-73e1-423b-8371-6e54c1ec428f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lazy-loading 'info_cache' on Instance uuid 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1009.659979] env[68217]: DEBUG oslo_vmware.api [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': task-2961766, 'name': PowerOnVM_Task, 'duration_secs': 
0.549445} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.660294] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1009.660521] env[68217]: INFO nova.compute.manager [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Took 7.65 seconds to spawn the instance on the hypervisor. [ 1009.660706] env[68217]: DEBUG nova.compute.manager [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1009.661510] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d760f12-807d-4caa-a8a5-56015dc7d38c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.677041] env[68217]: DEBUG oslo_concurrency.lockutils [None req-15a1cbb0-847d-434f-889c-1843e01d7fa8 tempest-ServerDiskConfigTestJSON-253963987 tempest-ServerDiskConfigTestJSON-253963987-project-member] Lock "92f99a45-13a3-48d9-8dbc-4065cc8ee9dc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.892s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1009.687390] env[68217]: DEBUG nova.network.neutron [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1009.761148] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Releasing lock "refresh_cache-34f176e7-f98e-4eda-aee9-45e44d5ffb85" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1009.762231] env[68217]: DEBUG nova.compute.manager [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Instance network_info: |[{"id": "a9ab050e-fd0e-469a-8c0a-a9794739f06b", "address": "fa:16:3e:30:73:93", "network": {"id": "42878174-e5ca-4794-b014-f0564b6f9268", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1527613629", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "905b15e740ad4f879ba61518ba400680", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bed837fa-6b6a-4192-a229-a99426a46065", "external-id": "nsx-vlan-transportzone-954", "segmentation_id": 954, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9ab050e-fd", "ovs_interfaceid": "a9ab050e-fd0e-469a-8c0a-a9794739f06b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "79c6bcfe-54df-47ec-a39b-84e3bba24e55", "address": "fa:16:3e:4a:e2:54", "network": {"id": "0149085c-6f20-47db-87ac-aa2336e92240", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1636213118", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "905b15e740ad4f879ba61518ba400680", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6365036-aa37-44d2-90d1-ca1c3516ded9", "external-id": "nsx-vlan-transportzone-66", "segmentation_id": 66, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79c6bcfe-54", "ovs_interfaceid": "79c6bcfe-54df-47ec-a39b-84e3bba24e55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1009.762671] env[68217]: DEBUG oslo_concurrency.lockutils [req-cd856425-025f-443c-ae70-5b4b1c652074 req-44f71472-2dc4-46de-ac7d-5117fd3a140f service nova] Acquired lock "refresh_cache-34f176e7-f98e-4eda-aee9-45e44d5ffb85" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1009.762947] env[68217]: DEBUG nova.network.neutron 
[req-cd856425-025f-443c-ae70-5b4b1c652074 req-44f71472-2dc4-46de-ac7d-5117fd3a140f service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Refreshing network info cache for port 79c6bcfe-54df-47ec-a39b-84e3bba24e55 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1009.764546] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:30:73:93', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'bed837fa-6b6a-4192-a229-a99426a46065', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a9ab050e-fd0e-469a-8c0a-a9794739f06b', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:4a:e2:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c6365036-aa37-44d2-90d1-ca1c3516ded9', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '79c6bcfe-54df-47ec-a39b-84e3bba24e55', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1009.782430] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1009.786795] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1009.787441] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9a5d1fdd-2a9f-446c-8c3a-dfb0e326cd97 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.824076] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1009.824076] env[68217]: value = "task-2961768" [ 1009.824076] env[68217]: _type = "Task" [ 1009.824076] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.831753] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961768, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.902179] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1010.008597] env[68217]: DEBUG oslo_vmware.api [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961767, 'name': PowerOnVM_Task, 'duration_secs': 0.545167} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.008868] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1010.011593] env[68217]: DEBUG nova.compute.manager [None req-3c47b6cf-2eb5-4d16-90ae-ab1eb3212fe6 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1010.012426] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cc73f98-2595-496e-980d-5d7cb1ce4d25 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.040091] env[68217]: DEBUG nova.network.neutron [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Updating instance_info_cache with network_info: [{"id": "b5d29b01-b9b6-4d25-9fef-4a335cf05875", "address": "fa:16:3e:64:1c:67", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5d29b01-b9", "ovs_interfaceid": "b5d29b01-b9b6-4d25-9fef-4a335cf05875", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.109137] env[68217]: DEBUG nova.objects.base [None req-694e6cf2-73e1-423b-8371-6e54c1ec428f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Object Instance<95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f> lazy-loaded attributes: flavor,info_cache {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1010.187221] env[68217]: INFO nova.compute.manager [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Took 29.15 seconds to build instance. 
[ 1010.207866] env[68217]: DEBUG nova.network.neutron [req-cd856425-025f-443c-ae70-5b4b1c652074 req-44f71472-2dc4-46de-ac7d-5117fd3a140f service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Updated VIF entry in instance network info cache for port 79c6bcfe-54df-47ec-a39b-84e3bba24e55. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1010.207866] env[68217]: DEBUG nova.network.neutron [req-cd856425-025f-443c-ae70-5b4b1c652074 req-44f71472-2dc4-46de-ac7d-5117fd3a140f service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Updating instance_info_cache with network_info: [{"id": "a9ab050e-fd0e-469a-8c0a-a9794739f06b", "address": "fa:16:3e:30:73:93", "network": {"id": "42878174-e5ca-4794-b014-f0564b6f9268", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1527613629", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.133", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "905b15e740ad4f879ba61518ba400680", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "bed837fa-6b6a-4192-a229-a99426a46065", "external-id": "nsx-vlan-transportzone-954", "segmentation_id": 954, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa9ab050e-fd", "ovs_interfaceid": "a9ab050e-fd0e-469a-8c0a-a9794739f06b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "79c6bcfe-54df-47ec-a39b-84e3bba24e55", "address": "fa:16:3e:4a:e2:54", "network": {"id": "0149085c-6f20-47db-87ac-aa2336e92240", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1636213118", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "905b15e740ad4f879ba61518ba400680", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6365036-aa37-44d2-90d1-ca1c3516ded9", "external-id": "nsx-vlan-transportzone-66", "segmentation_id": 66, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79c6bcfe-54", "ovs_interfaceid": "79c6bcfe-54df-47ec-a39b-84e3bba24e55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.335339] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961768, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.363879] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8526effd-3f13-4314-b343-3eaa9aeb4607 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.371719] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e811d38a-aa88-418b-9fac-0c0052fb87b5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.404479] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae710ba8-4761-4eac-a241-bdae16fe907c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.412232] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d09c5d56-bef1-464f-a787-fbd3637801cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.425898] env[68217]: DEBUG nova.compute.provider_tree [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1010.544100] env[68217]: DEBUG oslo_concurrency.lockutils [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "refresh_cache-df4c3a34-2dea-4f82-9ea6-7a9eb1c03179" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1010.544100] env[68217]: DEBUG nova.compute.manager [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Instance network_info: |[{"id": "b5d29b01-b9b6-4d25-9fef-4a335cf05875", "address": "fa:16:3e:64:1c:67", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5d29b01-b9", "ovs_interfaceid": "b5d29b01-b9b6-4d25-9fef-4a335cf05875", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1010.544100] env[68217]: DEBUG nova.virt.vmwareapi.vmops 
[None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:1c:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5d29b01-b9b6-4d25-9fef-4a335cf05875', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1010.551630] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1010.552600] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1010.553181] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f1590dcd-a760-4830-95c6-34540f5478ae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.578876] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1010.578876] env[68217]: value = "task-2961769" [ 1010.578876] env[68217]: _type = "Task" [ 1010.578876] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.586372] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961769, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.638993] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d20ea946-f6a6-4d08-9cbf-a035140a8cca tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Acquiring lock "interface-d1fcac61-0d2a-4331-9042-af11c3c36ae4-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1010.639275] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d20ea946-f6a6-4d08-9cbf-a035140a8cca tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Lock "interface-d1fcac61-0d2a-4331-9042-af11c3c36ae4-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1010.639594] env[68217]: DEBUG nova.objects.instance [None req-d20ea946-f6a6-4d08-9cbf-a035140a8cca tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Lazy-loading 'flavor' on Instance uuid d1fcac61-0d2a-4331-9042-af11c3c36ae4 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1010.689604] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1abb4ed-9be2-49af-be70-21b6751bb0bc tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Lock "d1fcac61-0d2a-4331-9042-af11c3c36ae4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 30.661s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1010.713447] env[68217]: DEBUG oslo_concurrency.lockutils [req-cd856425-025f-443c-ae70-5b4b1c652074 req-44f71472-2dc4-46de-ac7d-5117fd3a140f service nova] Releasing lock "refresh_cache-34f176e7-f98e-4eda-aee9-45e44d5ffb85" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1010.752820] env[68217]: DEBUG nova.compute.manager [req-778329f2-d245-4dca-abd7-8ab092a46e21 req-1fe2d536-aaf2-4732-9404-010e3942598c service nova] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Received event network-changed-b5d29b01-b9b6-4d25-9fef-4a335cf05875 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1010.753029] env[68217]: DEBUG nova.compute.manager [req-778329f2-d245-4dca-abd7-8ab092a46e21 req-1fe2d536-aaf2-4732-9404-010e3942598c service nova] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Refreshing instance network info cache due to event network-changed-b5d29b01-b9b6-4d25-9fef-4a335cf05875. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1010.753240] env[68217]: DEBUG oslo_concurrency.lockutils [req-778329f2-d245-4dca-abd7-8ab092a46e21 req-1fe2d536-aaf2-4732-9404-010e3942598c service nova] Acquiring lock "refresh_cache-df4c3a34-2dea-4f82-9ea6-7a9eb1c03179" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.753381] env[68217]: DEBUG oslo_concurrency.lockutils [req-778329f2-d245-4dca-abd7-8ab092a46e21 req-1fe2d536-aaf2-4732-9404-010e3942598c service nova] Acquired lock "refresh_cache-df4c3a34-2dea-4f82-9ea6-7a9eb1c03179" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1010.753546] env[68217]: DEBUG nova.network.neutron [req-778329f2-d245-4dca-abd7-8ab092a46e21 req-1fe2d536-aaf2-4732-9404-010e3942598c service nova] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Refreshing network info cache for port b5d29b01-b9b6-4d25-9fef-4a335cf05875 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1010.835271] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961768, 'name': CreateVM_Task, 'duration_secs': 0.641464} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.835449] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1010.836221] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.836388] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1010.836741] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1010.837020] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11e07e80-d379-4eaa-aeb0-19f0205836d0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.842525] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 1010.842525] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f736af-7b69-8295-6eb1-96e2b151c2a4" [ 1010.842525] env[68217]: _type = "Task" [ 1010.842525] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.850998] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f736af-7b69-8295-6eb1-96e2b151c2a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.874534] env[68217]: DEBUG nova.network.neutron [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Successfully updated port: c02a5df2-09c6-499e-8c8a-5e198ba3ef9e {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1010.929478] env[68217]: DEBUG nova.scheduler.client.report [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1010.971169] env[68217]: DEBUG nova.network.neutron [None req-694e6cf2-73e1-423b-8371-6e54c1ec428f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Updating instance_info_cache with network_info: [{"id": "86568bc3-8f1e-4880-9a22-48003fc7babd", "address": "fa:16:3e:92:6c:cf", "network": {"id": "0e4ebc2e-6e2d-4414-a560-9db08d15dabf", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1709597117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.206", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "904fd1b1eb9d4ab8bd1ea9967249bc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap86568bc3-8f", "ovs_interfaceid": "86568bc3-8f1e-4880-9a22-48003fc7babd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.066410] env[68217]: DEBUG oslo_concurrency.lockutils [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring 
lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1011.068120] env[68217]: DEBUG oslo_concurrency.lockutils [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.089902] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961769, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.143554] env[68217]: DEBUG nova.objects.instance [None req-d20ea946-f6a6-4d08-9cbf-a035140a8cca tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Lazy-loading 'pci_requests' on Instance uuid d1fcac61-0d2a-4331-9042-af11c3c36ae4 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.352441] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f736af-7b69-8295-6eb1-96e2b151c2a4, 'name': SearchDatastore_Task, 'duration_secs': 0.012876} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.352733] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1011.352984] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1011.353235] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.353383] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1011.353573] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1525b7f-5547-4044-accd-b829a5677dd9 
tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1011.353833] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fb06afc5-79dd-4943-ba2d-75465ce3699f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.370159] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1011.370340] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1011.374514] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2a3be69-f327-4f77-9b30-a78a76344f91 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.379874] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Acquiring lock "refresh_cache-33802025-7f72-4ad9-80fe-b15196b1a577" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.379874] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Acquired lock "refresh_cache-33802025-7f72-4ad9-80fe-b15196b1a577" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1011.379874] env[68217]: DEBUG nova.network.neutron [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1011.381953] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 1011.381953] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]523e4f1d-5c66-8a7a-6857-46afb8ec598b" [ 1011.381953] env[68217]: _type = "Task" [ 1011.381953] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.393682] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523e4f1d-5c66-8a7a-6857-46afb8ec598b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.435044] env[68217]: DEBUG oslo_concurrency.lockutils [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.320s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.435545] env[68217]: DEBUG nova.compute.manager [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1011.438083] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.536s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1011.438303] env[68217]: DEBUG nova.objects.instance [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lazy-loading 'resources' on Instance uuid 66ca9efd-1839-4e98-b006-5fc3adda375d {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1011.448575] env[68217]: DEBUG nova.network.neutron [req-778329f2-d245-4dca-abd7-8ab092a46e21 req-1fe2d536-aaf2-4732-9404-010e3942598c service nova] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Updated VIF entry in instance network info cache for port b5d29b01-b9b6-4d25-9fef-4a335cf05875. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1011.448971] env[68217]: DEBUG nova.network.neutron [req-778329f2-d245-4dca-abd7-8ab092a46e21 req-1fe2d536-aaf2-4732-9404-010e3942598c service nova] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Updating instance_info_cache with network_info: [{"id": "b5d29b01-b9b6-4d25-9fef-4a335cf05875", "address": "fa:16:3e:64:1c:67", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb5d29b01-b9", "ovs_interfaceid": "b5d29b01-b9b6-4d25-9fef-4a335cf05875", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1011.474014] env[68217]: DEBUG oslo_concurrency.lockutils [None req-694e6cf2-73e1-423b-8371-6e54c1ec428f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Releasing lock "refresh_cache-95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1011.574256] env[68217]: DEBUG nova.compute.utils [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1011.589020] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961769, 'name': CreateVM_Task, 'duration_secs': 0.54674} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.589933] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1011.590793] env[68217]: DEBUG oslo_concurrency.lockutils [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1011.591135] env[68217]: DEBUG oslo_concurrency.lockutils [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1011.591583] env[68217]: DEBUG oslo_concurrency.lockutils [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1011.592942] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-54379fa2-86d9-4b99-8169-32f5e892b49b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.597784] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1011.597784] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]525b6373-a929-6688-7783-073062e09ee4" [ 1011.597784] env[68217]: _type = "Task" [ 1011.597784] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.606870] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]525b6373-a929-6688-7783-073062e09ee4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.645750] env[68217]: DEBUG nova.objects.base [None req-d20ea946-f6a6-4d08-9cbf-a035140a8cca tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Object Instance lazy-loaded attributes: flavor,pci_requests {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1011.645952] env[68217]: DEBUG nova.network.neutron [None req-d20ea946-f6a6-4d08-9cbf-a035140a8cca tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1011.749519] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d20ea946-f6a6-4d08-9cbf-a035140a8cca tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Lock "interface-d1fcac61-0d2a-4331-9042-af11c3c36ae4-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.110s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1011.897561] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523e4f1d-5c66-8a7a-6857-46afb8ec598b, 'name': SearchDatastore_Task, 'duration_secs': 0.031263} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.898471] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-464f6b50-c163-4f94-b09e-9e545d622b13 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.903993] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 1011.903993] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f3130a-0bf5-aa4a-0a57-d0edeb437e73" [ 1011.903993] env[68217]: _type = "Task" [ 1011.903993] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.913944] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f3130a-0bf5-aa4a-0a57-d0edeb437e73, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.936169] env[68217]: DEBUG nova.network.neutron [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1011.941089] env[68217]: DEBUG nova.compute.utils [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1011.942605] env[68217]: DEBUG nova.compute.manager [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1011.943033] env[68217]: DEBUG nova.network.neutron [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1011.951421] env[68217]: DEBUG oslo_concurrency.lockutils [req-778329f2-d245-4dca-abd7-8ab092a46e21 req-1fe2d536-aaf2-4732-9404-010e3942598c service nova] Releasing lock "refresh_cache-df4c3a34-2dea-4f82-9ea6-7a9eb1c03179" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.019920] env[68217]: DEBUG nova.policy [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '554b6b3d22404c0ba52c739b3c7b98a9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8fb62d18446841a3b2a6ac25ab5dc869', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1012.077476] env[68217]: DEBUG oslo_concurrency.lockutils [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.011s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1012.119114] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]525b6373-a929-6688-7783-073062e09ee4, 'name': SearchDatastore_Task, 'duration_secs': 0.0139} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.119114] env[68217]: DEBUG oslo_concurrency.lockutils [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.119114] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1012.119114] env[68217]: DEBUG oslo_concurrency.lockutils [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.119114] env[68217]: DEBUG oslo_concurrency.lockutils [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1012.119114] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1012.119114] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-71b67006-d8de-4fd0-9850-a43ea454de6e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.136256] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1012.136495] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1012.140412] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4117cd6-fa9e-4f1d-b4fe-ac76b6f25092 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.147016] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1012.147016] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52119c63-a0ed-211d-330e-de450c37ba0a" [ 1012.147016] env[68217]: _type = "Task" [ 1012.147016] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.155772] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52119c63-a0ed-211d-330e-de450c37ba0a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.165102] env[68217]: DEBUG nova.network.neutron [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Updating instance_info_cache with network_info: [{"id": "c02a5df2-09c6-499e-8c8a-5e198ba3ef9e", "address": "fa:16:3e:fa:13:e5", "network": {"id": "73be8816-3443-47f9-9a15-8996195f7f6d", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-684004022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf28bebd84964101a508b11ddc924552", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc02a5df2-09", "ovs_interfaceid": "c02a5df2-09c6-499e-8c8a-5e198ba3ef9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.232809] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82609057-25f4-4f26-bcaf-c95928c3964a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.241840] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-730124f3-b13b-4e47-acf3-0a68c1e90b80 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.281935] env[68217]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a045102c-c267-4809-b55b-31547e71509f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.292787] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8511358-6d98-4651-accd-97948a189d07 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.311692] env[68217]: DEBUG nova.compute.provider_tree [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1012.373160] env[68217]: DEBUG nova.network.neutron [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Successfully created port: a112e6da-1e76-4618-b45e-229cbb5c0ebd {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1012.416957] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f3130a-0bf5-aa4a-0a57-d0edeb437e73, 'name': SearchDatastore_Task, 'duration_secs': 0.010198} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.417368] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.417649] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 34f176e7-f98e-4eda-aee9-45e44d5ffb85/34f176e7-f98e-4eda-aee9-45e44d5ffb85.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1012.417923] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-edff589b-58fb-4ac3-b88d-a8bef64de5a5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.425268] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 1012.425268] env[68217]: value = "task-2961770" [ 1012.425268] env[68217]: _type = "Task" [ 1012.425268] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.433700] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961770, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.449741] env[68217]: DEBUG nova.compute.manager [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1012.479396] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-694e6cf2-73e1-423b-8371-6e54c1ec428f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1012.479724] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a130dfdf-d52b-40a5-bde5-0596631c67d3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.488065] env[68217]: DEBUG oslo_vmware.api [None req-694e6cf2-73e1-423b-8371-6e54c1ec428f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1012.488065] env[68217]: value = "task-2961771" [ 1012.488065] env[68217]: _type = "Task" [ 1012.488065] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.496827] env[68217]: DEBUG oslo_vmware.api [None req-694e6cf2-73e1-423b-8371-6e54c1ec428f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961771, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.668069] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52119c63-a0ed-211d-330e-de450c37ba0a, 'name': SearchDatastore_Task, 'duration_secs': 0.029201} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.668069] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Releasing lock "refresh_cache-33802025-7f72-4ad9-80fe-b15196b1a577" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.668174] env[68217]: DEBUG nova.compute.manager [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Instance network_info: |[{"id": "c02a5df2-09c6-499e-8c8a-5e198ba3ef9e", "address": "fa:16:3e:fa:13:e5", "network": {"id": "73be8816-3443-47f9-9a15-8996195f7f6d", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-684004022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf28bebd84964101a508b11ddc924552", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc02a5df2-09", "ovs_interfaceid": "c02a5df2-09c6-499e-8c8a-5e198ba3ef9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1012.670235] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:13:e5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b00fe87c-d828-442f-bd09-e9018c468557', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c02a5df2-09c6-499e-8c8a-5e198ba3ef9e', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1012.678297] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Creating folder: Project (cf28bebd84964101a508b11ddc924552). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1012.678628] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8815112c-f900-4516-83e2-e8cdb530e726 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.683181] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ca157a71-54ec-4cde-b48a-658a454e6821 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.691952] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1012.691952] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]520395d2-6327-1eab-079b-80863c09d2d4" [ 1012.691952] env[68217]: _type = "Task" [ 1012.691952] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.700189] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Created folder: Project (cf28bebd84964101a508b11ddc924552) in parent group-v594094. [ 1012.701034] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Creating folder: Instances. Parent ref: group-v594363. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1012.701181] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0e7f526c-adda-4bba-87db-671584f56aa2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.708486] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]520395d2-6327-1eab-079b-80863c09d2d4, 'name': SearchDatastore_Task, 'duration_secs': 0.013477} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.710157] env[68217]: DEBUG oslo_concurrency.lockutils [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1012.710357] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] df4c3a34-2dea-4f82-9ea6-7a9eb1c03179/df4c3a34-2dea-4f82-9ea6-7a9eb1c03179.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1012.710709] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-aa033aaa-f5b2-440b-8ee3-edde58971703 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.718089] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Created folder: Instances in parent group-v594363. [ 1012.718364] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1012.719859] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1012.720114] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1012.720114] env[68217]: value = "task-2961774" [ 1012.720114] env[68217]: _type = "Task" [ 1012.720114] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.720329] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e1784916-f0ee-4c8a-8dba-94dac7110a45 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.744575] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1012.744575] env[68217]: value = "task-2961775" [ 1012.744575] env[68217]: _type = "Task" [ 1012.744575] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.748011] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961774, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.757739] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961775, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.815949] env[68217]: DEBUG nova.scheduler.client.report [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1012.937870] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961770, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.002481] env[68217]: DEBUG oslo_vmware.api [None req-694e6cf2-73e1-423b-8371-6e54c1ec428f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961771, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.159468] env[68217]: DEBUG oslo_concurrency.lockutils [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.159783] env[68217]: DEBUG oslo_concurrency.lockutils [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.159981] env[68217]: INFO nova.compute.manager [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Attaching volume 510a8087-136f-46d8-91cf-d7ca59a54445 to /dev/sdb [ 1013.217239] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b151ef3-17b2-4ad6-993c-6eedcd68b867 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.227024] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-429ba94d-c431-40d5-b84d-7c7b4f952df5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.245704] env[68217]: DEBUG nova.virt.block_device [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Updating existing volume attachment record: f6da112c-d351-4245-a99a-27179b32beee {{(pid=68217) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1013.253562] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961774, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.264896] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961775, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.322358] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.884s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.357490] env[68217]: DEBUG nova.compute.manager [req-e721875c-045d-472b-a16a-47acc8ff405c req-cb335f00-546e-450f-8a37-66df9e61df4f service nova] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Received event network-vif-plugged-c02a5df2-09c6-499e-8c8a-5e198ba3ef9e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1013.357726] env[68217]: DEBUG oslo_concurrency.lockutils [req-e721875c-045d-472b-a16a-47acc8ff405c req-cb335f00-546e-450f-8a37-66df9e61df4f service nova] Acquiring lock "33802025-7f72-4ad9-80fe-b15196b1a577-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.358038] env[68217]: DEBUG oslo_concurrency.lockutils [req-e721875c-045d-472b-a16a-47acc8ff405c req-cb335f00-546e-450f-8a37-66df9e61df4f service nova] Lock "33802025-7f72-4ad9-80fe-b15196b1a577-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.358187] env[68217]: DEBUG oslo_concurrency.lockutils [req-e721875c-045d-472b-a16a-47acc8ff405c req-cb335f00-546e-450f-8a37-66df9e61df4f service nova] Lock "33802025-7f72-4ad9-80fe-b15196b1a577-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.358475] env[68217]: DEBUG nova.compute.manager [req-e721875c-045d-472b-a16a-47acc8ff405c req-cb335f00-546e-450f-8a37-66df9e61df4f service nova] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] No waiting events found dispatching network-vif-plugged-c02a5df2-09c6-499e-8c8a-5e198ba3ef9e {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1013.358541] env[68217]: WARNING nova.compute.manager [req-e721875c-045d-472b-a16a-47acc8ff405c req-cb335f00-546e-450f-8a37-66df9e61df4f service nova] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Received unexpected event network-vif-plugged-c02a5df2-09c6-499e-8c8a-5e198ba3ef9e for instance with vm_state building and task_state spawning. [ 1013.358695] env[68217]: DEBUG nova.compute.manager [req-e721875c-045d-472b-a16a-47acc8ff405c req-cb335f00-546e-450f-8a37-66df9e61df4f service nova] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Received event network-changed-c02a5df2-09c6-499e-8c8a-5e198ba3ef9e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1013.358861] env[68217]: DEBUG nova.compute.manager [req-e721875c-045d-472b-a16a-47acc8ff405c req-cb335f00-546e-450f-8a37-66df9e61df4f service nova] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Refreshing instance network info cache due to event network-changed-c02a5df2-09c6-499e-8c8a-5e198ba3ef9e. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1013.359050] env[68217]: DEBUG oslo_concurrency.lockutils [req-e721875c-045d-472b-a16a-47acc8ff405c req-cb335f00-546e-450f-8a37-66df9e61df4f service nova] Acquiring lock "refresh_cache-33802025-7f72-4ad9-80fe-b15196b1a577" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.359219] env[68217]: DEBUG oslo_concurrency.lockutils [req-e721875c-045d-472b-a16a-47acc8ff405c req-cb335f00-546e-450f-8a37-66df9e61df4f service nova] Acquired lock "refresh_cache-33802025-7f72-4ad9-80fe-b15196b1a577" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1013.359384] env[68217]: DEBUG nova.network.neutron [req-e721875c-045d-472b-a16a-47acc8ff405c req-cb335f00-546e-450f-8a37-66df9e61df4f service nova] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Refreshing network info cache for port c02a5df2-09c6-499e-8c8a-5e198ba3ef9e {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1013.361521] env[68217]: INFO nova.scheduler.client.report [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleted allocations for instance 66ca9efd-1839-4e98-b006-5fc3adda375d [ 1013.440023] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961770, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.671691} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.441803] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 34f176e7-f98e-4eda-aee9-45e44d5ffb85/34f176e7-f98e-4eda-aee9-45e44d5ffb85.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1013.442174] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1013.449629] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1612e461-cebd-4ebd-a5ea-04d7cb74d974 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.459366] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 1013.459366] env[68217]: value = "task-2961777" [ 1013.459366] env[68217]: _type = "Task" [ 1013.459366] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.460410] env[68217]: DEBUG nova.compute.manager [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1013.477102] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961777, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.495318] env[68217]: DEBUG nova.virt.hardware [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1013.495596] env[68217]: DEBUG nova.virt.hardware [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1013.495772] env[68217]: DEBUG nova.virt.hardware [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1013.495964] env[68217]: DEBUG nova.virt.hardware [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1013.496141] env[68217]: DEBUG nova.virt.hardware [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1013.496290] env[68217]: DEBUG nova.virt.hardware [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1013.496512] env[68217]: DEBUG 
nova.virt.hardware [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1013.496674] env[68217]: DEBUG nova.virt.hardware [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1013.497097] env[68217]: DEBUG nova.virt.hardware [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1013.497286] env[68217]: DEBUG nova.virt.hardware [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1013.497498] env[68217]: DEBUG nova.virt.hardware [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1013.498383] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa204842-1c36-42c1-be03-653a102a36f7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.505958] env[68217]: DEBUG oslo_vmware.api [None req-694e6cf2-73e1-423b-8371-6e54c1ec428f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961771, 'name': PowerOnVM_Task, 'duration_secs': 0.563718} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.508334] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-694e6cf2-73e1-423b-8371-6e54c1ec428f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1013.508541] env[68217]: DEBUG nova.compute.manager [None req-694e6cf2-73e1-423b-8371-6e54c1ec428f tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1013.509383] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71e7a45e-b6f4-4346-ba04-e4b38a8915f1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.513502] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee54b683-bd9e-4c60-a2b1-f5cfc13ba15b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.710889] env[68217]: INFO nova.compute.manager [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Rescuing [ 1013.711634] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "refresh_cache-e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.711634] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired lock "refresh_cache-e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1013.711634] env[68217]: DEBUG nova.network.neutron [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1013.747741] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961774, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.761693} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.748054] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] df4c3a34-2dea-4f82-9ea6-7a9eb1c03179/df4c3a34-2dea-4f82-9ea6-7a9eb1c03179.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1013.748366] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1013.748702] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-9b4154ac-b9ae-4694-82ea-51d4550ba87b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.758966] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1013.758966] env[68217]: value = "task-2961780" [ 1013.758966] env[68217]: _type = "Task" [ 1013.758966] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.765942] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961775, 'name': CreateVM_Task, 'duration_secs': 0.590636} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.766430] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1013.767147] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1013.767313] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1013.767623] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1013.770446] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-893942e9-d69a-45ee-9861-2fcc9b0b05cc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.772111] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961780, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.774886] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Waiting for the task: (returnval){ [ 1013.774886] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fac5fd-b30d-bbbf-dbea-496098df5861" [ 1013.774886] env[68217]: _type = "Task" [ 1013.774886] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.783699] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fac5fd-b30d-bbbf-dbea-496098df5861, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.872144] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b1cd89c4-d9dc-42a1-9d27-567a0ddd523b tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "66ca9efd-1839-4e98-b006-5fc3adda375d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.415s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.928125] env[68217]: DEBUG nova.network.neutron [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Successfully updated port: a112e6da-1e76-4618-b45e-229cbb5c0ebd {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1013.947254] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Acquiring lock "d1fcac61-0d2a-4331-9042-af11c3c36ae4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.947630] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Lock "d1fcac61-0d2a-4331-9042-af11c3c36ae4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.947912] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Acquiring lock "d1fcac61-0d2a-4331-9042-af11c3c36ae4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1013.948214] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Lock "d1fcac61-0d2a-4331-9042-af11c3c36ae4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1013.948464] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Lock "d1fcac61-0d2a-4331-9042-af11c3c36ae4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1013.951420] env[68217]: INFO nova.compute.manager [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Terminating instance [ 
1013.977102] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961777, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.302022} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.977546] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1013.978694] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85341783-f9cf-4153-bc39-15f079ff0caf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.013532] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 34f176e7-f98e-4eda-aee9-45e44d5ffb85/34f176e7-f98e-4eda-aee9-45e44d5ffb85.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1014.017163] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb00e3ad-cf5f-4bd9-88ae-5bdf727797cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.045819] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 1014.045819] env[68217]: value = "task-2961781" [ 1014.045819] env[68217]: _type = "Task" [ 1014.045819] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.059598] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961781, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.270308] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961780, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071249} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.274334] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1014.274942] env[68217]: DEBUG nova.network.neutron [req-e721875c-045d-472b-a16a-47acc8ff405c req-cb335f00-546e-450f-8a37-66df9e61df4f service nova] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Updated VIF entry in instance network info cache for port c02a5df2-09c6-499e-8c8a-5e198ba3ef9e. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1014.275902] env[68217]: DEBUG nova.network.neutron [req-e721875c-045d-472b-a16a-47acc8ff405c req-cb335f00-546e-450f-8a37-66df9e61df4f service nova] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Updating instance_info_cache with network_info: [{"id": "c02a5df2-09c6-499e-8c8a-5e198ba3ef9e", "address": "fa:16:3e:fa:13:e5", "network": {"id": "73be8816-3443-47f9-9a15-8996195f7f6d", "bridge": "br-int", "label": "tempest-ServerMetadataNegativeTestJSON-684004022-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "cf28bebd84964101a508b11ddc924552", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b00fe87c-d828-442f-bd09-e9018c468557", "external-id": "nsx-vlan-transportzone-7", "segmentation_id": 7, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc02a5df2-09", "ovs_interfaceid": "c02a5df2-09c6-499e-8c8a-5e198ba3ef9e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.279048] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e01b768-a84b-4454-80d0-213089d1f947 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.295163] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fac5fd-b30d-bbbf-dbea-496098df5861, 'name': SearchDatastore_Task, 'duration_secs': 0.015382} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.305033] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1014.305300] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1014.305528] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.305674] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1014.305978] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1014.317589] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Reconfiguring VM instance instance-00000061 to attach disk [datastore2] df4c3a34-2dea-4f82-9ea6-7a9eb1c03179/df4c3a34-2dea-4f82-9ea6-7a9eb1c03179.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1014.318243] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6420a526-76a1-4f1c-9148-49cbe8bcf2a5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.320411] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6f6135cd-bd6e-4033-9d70-881098cead00 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.337394] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68217) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1014.338607] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1014.344032] env[68217]: DEBUG oslo_vmware.rw_handles [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c9808e-9dfe-69b6-e164-ebf977e60187/disk-0.vmdk. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1014.344960] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1051f708-2b68-4a1a-b320-4c6ca055aa43 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.349749] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1014.349749] env[68217]: value = "task-2961782" [ 1014.349749] env[68217]: _type = "Task" [ 1014.349749] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.349948] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1014.350123] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1014.351227] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3c9ffda9-7f5f-43e4-91ed-9aa127496a02 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.357814] env[68217]: DEBUG oslo_vmware.rw_handles [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c9808e-9dfe-69b6-e164-ebf977e60187/disk-0.vmdk is in state: ready. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1014.358031] env[68217]: ERROR oslo_vmware.rw_handles [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c9808e-9dfe-69b6-e164-ebf977e60187/disk-0.vmdk due to incomplete transfer. 
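The entries above and below show the usual task life cycle in this log: nova-compute invokes a vCenter operation (ReconfigVM_Task, SearchDatastore_Task, Destroy_Task, ...), receives a task handle, and then polls it, logging "progress is N%" until the task is reported as completed successfully or aborted. The following is a minimal illustrative sketch of that polling pattern only, written against the standard library with hypothetical names (TaskInfo, fetch_task_info); it is not the oslo.vmware wait_for_task implementation and is included purely to clarify what the repeated _poll_task lines represent.

    # Illustrative sketch: a simplified poll loop mirroring the
    # "Waiting for the task ... progress is N% ... completed successfully"
    # pattern in the log. TaskInfo and fetch_task_info are hypothetical.
    import time
    from dataclasses import dataclass

    @dataclass
    class TaskInfo:
        state: str       # "running", "success", or "error"
        progress: int    # percent complete reported by the backend
        error: str = ""

    def wait_for_task(fetch_task_info, task_id, interval=0.5, timeout=300):
        """Poll fetch_task_info(task_id) until the task succeeds or fails."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = fetch_task_info(task_id)
            if info.state == "success":
                print(f"Task {task_id} completed successfully.")
                return info
            if info.state == "error":
                raise RuntimeError(f"Task {task_id} failed: {info.error}")
            print(f"Task {task_id} progress is {info.progress}%.")
            time.sleep(interval)
        raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")

In the real service the equivalent loop runs inside oslo.vmware's API session (the _poll_task / wait_for_task call sites referenced in each entry), which is why a single VM operation such as the ReconfigVM_Task above produces a sequence of progress lines before the final "completed successfully" record.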
[ 1014.358591] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-6aced758-5d7c-4766-9857-eb81a163f6c5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.361355] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Waiting for the task: (returnval){ [ 1014.361355] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5278e77a-c6ee-ec41-181f-da20956363d3" [ 1014.361355] env[68217]: _type = "Task" [ 1014.361355] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.371541] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961782, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.373566] env[68217]: DEBUG oslo_vmware.rw_handles [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c9808e-9dfe-69b6-e164-ebf977e60187/disk-0.vmdk. {{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1014.373768] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Uploaded image 9ee6281c-21fa-40a7-ae36-cd54a57dfaea to the Glance image server {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1014.376171] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1014.379922] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-f31ca153-1944-4f77-b3b7-dc3d81213c81 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.381440] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5278e77a-c6ee-ec41-181f-da20956363d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.386833] env[68217]: DEBUG oslo_vmware.api [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1014.386833] env[68217]: value = "task-2961783" [ 1014.386833] env[68217]: _type = "Task" [ 1014.386833] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.395313] env[68217]: DEBUG oslo_vmware.api [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961783, 'name': Destroy_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.430318] env[68217]: DEBUG oslo_concurrency.lockutils [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "refresh_cache-ffff4cf4-f663-4965-84d1-8351bfde1252" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1014.430318] env[68217]: DEBUG oslo_concurrency.lockutils [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "refresh_cache-ffff4cf4-f663-4965-84d1-8351bfde1252" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1014.430318] env[68217]: DEBUG nova.network.neutron [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1014.455745] env[68217]: DEBUG nova.compute.manager [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1014.455969] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1014.457033] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4196abd-5fe5-4d61-a023-24f4e49f94bc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.465662] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1014.465955] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-917a1cda-814b-43ff-a692-acb4acc5a3c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.471860] env[68217]: DEBUG oslo_vmware.api [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Waiting for the task: (returnval){ [ 1014.471860] env[68217]: value = "task-2961784" [ 1014.471860] env[68217]: _type = "Task" [ 1014.471860] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.479972] env[68217]: DEBUG oslo_vmware.api [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': task-2961784, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.558033] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961781, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.575681] env[68217]: DEBUG nova.network.neutron [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Updating instance_info_cache with network_info: [{"id": "46e4edb9-72c4-4a7d-af91-4b553d829391", "address": "fa:16:3e:bb:64:fc", "network": {"id": "8ad06dc6-c950-487f-b4ea-fe9364f8548d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-897727038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1424003d74424a9e84d15879f2e634e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46e4edb9-72", "ovs_interfaceid": "46e4edb9-72c4-4a7d-af91-4b553d829391", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1014.779433] env[68217]: DEBUG oslo_concurrency.lockutils [req-e721875c-045d-472b-a16a-47acc8ff405c req-cb335f00-546e-450f-8a37-66df9e61df4f service nova] Releasing lock "refresh_cache-33802025-7f72-4ad9-80fe-b15196b1a577" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1014.849327] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1014.849699] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1014.849970] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1014.850261] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1014.851223] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1014.852215] env[68217]: DEBUG 
oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1014.852460] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68217) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1014.858025] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1014.868211] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961782, 'name': ReconfigVM_Task, 'duration_secs': 0.461145} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.871599] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Reconfigured VM instance instance-00000061 to attach disk [datastore2] df4c3a34-2dea-4f82-9ea6-7a9eb1c03179/df4c3a34-2dea-4f82-9ea6-7a9eb1c03179.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1014.873397] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f321bd3b-7c9a-4c1e-a643-4316c08f095c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.880743] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5278e77a-c6ee-ec41-181f-da20956363d3, 'name': SearchDatastore_Task, 'duration_secs': 0.034696} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.882939] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1014.882939] env[68217]: value = "task-2961785" [ 1014.882939] env[68217]: _type = "Task" [ 1014.882939] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.883128] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7b016554-e2b1-427e-b247-ac5e9da3c76c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.895551] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Waiting for the task: (returnval){ [ 1014.895551] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5271808e-9168-5690-a65a-b4564e3e9990" [ 1014.895551] env[68217]: _type = "Task" [ 1014.895551] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.906535] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961785, 'name': Rename_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.912796] env[68217]: DEBUG oslo_vmware.api [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961783, 'name': Destroy_Task, 'duration_secs': 0.455558} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.913362] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5271808e-9168-5690-a65a-b4564e3e9990, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.914047] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Destroyed the VM [ 1014.914325] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1014.914640] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-2e8342b3-b2c1-4e36-b555-f276bd19d777 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.924538] env[68217]: DEBUG oslo_vmware.api [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1014.924538] env[68217]: value = "task-2961786" [ 1014.924538] env[68217]: _type = "Task" [ 1014.924538] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1014.959342] env[68217]: DEBUG oslo_vmware.api [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961786, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1014.963667] env[68217]: DEBUG nova.network.neutron [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1014.982199] env[68217]: DEBUG oslo_vmware.api [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': task-2961784, 'name': PowerOffVM_Task, 'duration_secs': 0.207018} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1014.985149] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1014.985350] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1014.986780] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5a3de43a-6baa-4e8e-bf74-62afef648dee {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.047020] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1015.047020] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1015.047020] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Deleting the datastore file [datastore1] d1fcac61-0d2a-4331-9042-af11c3c36ae4 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1015.047020] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1e0cc4f4-f076-4ec8-a968-fdec91307d02 {{(pid=68217) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.057141] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961781, 'name': ReconfigVM_Task, 'duration_secs': 0.561458} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.058733] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 34f176e7-f98e-4eda-aee9-45e44d5ffb85/34f176e7-f98e-4eda-aee9-45e44d5ffb85.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1015.059505] env[68217]: DEBUG oslo_vmware.api [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Waiting for the task: (returnval){ [ 1015.059505] env[68217]: value = "task-2961788" [ 1015.059505] env[68217]: _type = "Task" [ 1015.059505] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.059763] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0dfb003e-81d6-48b2-829c-151245be66a0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.069077] env[68217]: DEBUG oslo_vmware.api [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': task-2961788, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.070320] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 1015.070320] env[68217]: value = "task-2961789" [ 1015.070320] env[68217]: _type = "Task" [ 1015.070320] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.081157] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Releasing lock "refresh_cache-e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.083203] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961789, 'name': Rename_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.113731] env[68217]: DEBUG nova.network.neutron [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Updating instance_info_cache with network_info: [{"id": "a112e6da-1e76-4618-b45e-229cbb5c0ebd", "address": "fa:16:3e:43:19:4b", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa112e6da-1e", "ovs_interfaceid": "a112e6da-1e76-4618-b45e-229cbb5c0ebd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1015.147242] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "105e6181-19c4-466b-88a0-cdbca2cac230" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.147503] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "105e6181-19c4-466b-88a0-cdbca2cac230" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.147730] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "105e6181-19c4-466b-88a0-cdbca2cac230-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.147977] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "105e6181-19c4-466b-88a0-cdbca2cac230-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.148162] env[68217]: DEBUG oslo_concurrency.lockutils [None 
req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "105e6181-19c4-466b-88a0-cdbca2cac230-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.150677] env[68217]: INFO nova.compute.manager [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Terminating instance [ 1015.363045] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.363045] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.363045] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.363045] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68217) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1015.363838] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f10424d-f2c4-47ef-8dab-d9f424f74e18 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.374344] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d21c0d4e-a1d0-4890-9d67-8b2145b11de8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.394633] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47ee02b8-106b-42d0-9d13-36439accc0f0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.406190] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961785, 'name': Rename_Task, 'duration_secs': 0.162404} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.408654] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1015.409391] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-51c67e14-7564-400c-a800-9ba642dadc8c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.411979] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1c306c2-1e4b-42a0-9070-fd9fa95e24a8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.421255] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5271808e-9168-5690-a65a-b4564e3e9990, 'name': SearchDatastore_Task, 'duration_secs': 0.017471} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.422151] env[68217]: DEBUG nova.compute.manager [req-ad274fac-d144-419c-a44c-79adaffe5691 req-824d734b-516b-4608-83e3-188b0e137c5f service nova] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Received event network-vif-plugged-a112e6da-1e76-4618-b45e-229cbb5c0ebd {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1015.422255] env[68217]: DEBUG oslo_concurrency.lockutils [req-ad274fac-d144-419c-a44c-79adaffe5691 req-824d734b-516b-4608-83e3-188b0e137c5f service nova] Acquiring lock "ffff4cf4-f663-4965-84d1-8351bfde1252-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.422429] env[68217]: DEBUG oslo_concurrency.lockutils [req-ad274fac-d144-419c-a44c-79adaffe5691 req-824d734b-516b-4608-83e3-188b0e137c5f service nova] Lock "ffff4cf4-f663-4965-84d1-8351bfde1252-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.422609] env[68217]: DEBUG oslo_concurrency.lockutils [req-ad274fac-d144-419c-a44c-79adaffe5691 req-824d734b-516b-4608-83e3-188b0e137c5f service nova] Lock "ffff4cf4-f663-4965-84d1-8351bfde1252-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1015.422777] env[68217]: DEBUG nova.compute.manager [req-ad274fac-d144-419c-a44c-79adaffe5691 req-824d734b-516b-4608-83e3-188b0e137c5f service nova] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] No waiting events found dispatching network-vif-plugged-a112e6da-1e76-4618-b45e-229cbb5c0ebd {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1015.422940] env[68217]: WARNING nova.compute.manager [req-ad274fac-d144-419c-a44c-79adaffe5691 
req-824d734b-516b-4608-83e3-188b0e137c5f service nova] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Received unexpected event network-vif-plugged-a112e6da-1e76-4618-b45e-229cbb5c0ebd for instance with vm_state building and task_state spawning. [ 1015.423109] env[68217]: DEBUG nova.compute.manager [req-ad274fac-d144-419c-a44c-79adaffe5691 req-824d734b-516b-4608-83e3-188b0e137c5f service nova] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Received event network-changed-a112e6da-1e76-4618-b45e-229cbb5c0ebd {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1015.423263] env[68217]: DEBUG nova.compute.manager [req-ad274fac-d144-419c-a44c-79adaffe5691 req-824d734b-516b-4608-83e3-188b0e137c5f service nova] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Refreshing instance network info cache due to event network-changed-a112e6da-1e76-4618-b45e-229cbb5c0ebd. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1015.423426] env[68217]: DEBUG oslo_concurrency.lockutils [req-ad274fac-d144-419c-a44c-79adaffe5691 req-824d734b-516b-4608-83e3-188b0e137c5f service nova] Acquiring lock "refresh_cache-ffff4cf4-f663-4965-84d1-8351bfde1252" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1015.424305] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.424558] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 33802025-7f72-4ad9-80fe-b15196b1a577/33802025-7f72-4ad9-80fe-b15196b1a577.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1015.425173] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97bce02b-4ceb-4289-b084-4e65af338e79 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.457449] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180063MB free_disk=153GB free_vcpus=48 pci_devices=None {{(pid=68217) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1015.457449] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1015.457643] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 
0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1015.462083] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1015.462083] env[68217]: value = "task-2961790" [ 1015.462083] env[68217]: _type = "Task" [ 1015.462083] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.469571] env[68217]: DEBUG oslo_vmware.api [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961786, 'name': RemoveSnapshot_Task} progress is 17%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.470951] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Waiting for the task: (returnval){ [ 1015.470951] env[68217]: value = "task-2961791" [ 1015.470951] env[68217]: _type = "Task" [ 1015.470951] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.480224] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961790, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.485105] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': task-2961791, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.573017] env[68217]: DEBUG oslo_vmware.api [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Task: {'id': task-2961788, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.149596} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.573017] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1015.573017] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1015.573017] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1015.573017] env[68217]: INFO nova.compute.manager [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Took 1.12 seconds to destroy the instance on the hypervisor. [ 1015.573017] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1015.573017] env[68217]: DEBUG nova.compute.manager [-] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1015.573017] env[68217]: DEBUG nova.network.neutron [-] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1015.586319] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961789, 'name': Rename_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.618565] env[68217]: DEBUG oslo_concurrency.lockutils [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "refresh_cache-ffff4cf4-f663-4965-84d1-8351bfde1252" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1015.618722] env[68217]: DEBUG nova.compute.manager [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Instance network_info: |[{"id": "a112e6da-1e76-4618-b45e-229cbb5c0ebd", "address": "fa:16:3e:43:19:4b", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa112e6da-1e", "ovs_interfaceid": "a112e6da-1e76-4618-b45e-229cbb5c0ebd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1015.619420] env[68217]: DEBUG oslo_concurrency.lockutils [req-ad274fac-d144-419c-a44c-79adaffe5691 req-824d734b-516b-4608-83e3-188b0e137c5f service nova] Acquired lock "refresh_cache-ffff4cf4-f663-4965-84d1-8351bfde1252" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1015.619420] env[68217]: DEBUG nova.network.neutron [req-ad274fac-d144-419c-a44c-79adaffe5691 req-824d734b-516b-4608-83e3-188b0e137c5f service nova] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Refreshing network info cache for port a112e6da-1e76-4618-b45e-229cbb5c0ebd {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1015.620291] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:43:19:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '02bbcead-d833-4543-bec6-fb82dfe659ff', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a112e6da-1e76-4618-b45e-229cbb5c0ebd', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1015.628300] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 
tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1015.628805] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1015.629886] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0c6ca621-6296-429c-8c7a-5c2cf0cb9f19 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.654020] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1015.654020] env[68217]: value = "task-2961792" [ 1015.654020] env[68217]: _type = "Task" [ 1015.654020] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.654357] env[68217]: DEBUG nova.compute.manager [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1015.654473] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1015.655816] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711d0593-d401-4f2c-96e5-7be6bfe0449e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.664630] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961792, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.665979] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1015.666255] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2461b25a-52e1-4e04-b0a8-50ce7e8965e4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.672970] env[68217]: DEBUG oslo_vmware.api [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1015.672970] env[68217]: value = "task-2961793" [ 1015.672970] env[68217]: _type = "Task" [ 1015.672970] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.684032] env[68217]: DEBUG oslo_vmware.api [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961793, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.935945] env[68217]: DEBUG oslo_vmware.api [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961786, 'name': RemoveSnapshot_Task, 'duration_secs': 0.602675} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1015.936187] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1015.936420] env[68217]: INFO nova.compute.manager [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Took 19.22 seconds to snapshot the instance on the hypervisor. [ 1015.991833] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961790, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.992068] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': task-2961791, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.084686] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961789, 'name': Rename_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.161175] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961792, 'name': CreateVM_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.186588] env[68217]: DEBUG oslo_vmware.api [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961793, 'name': PowerOffVM_Task, 'duration_secs': 0.216647} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.186930] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1016.187131] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1016.187395] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ab641463-e152-4f2d-a064-7011ea0d775b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.265122] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1016.265122] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1016.265122] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleting the datastore file [datastore2] 105e6181-19c4-466b-88a0-cdbca2cac230 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1016.265122] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa7b946c-1ecc-4e28-8f32-4c9894a91d6f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.272199] env[68217]: DEBUG oslo_vmware.api [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1016.272199] env[68217]: value = "task-2961796" [ 1016.272199] env[68217]: _type = "Task" [ 1016.272199] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.284843] env[68217]: DEBUG oslo_vmware.api [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961796, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.496024] env[68217]: DEBUG oslo_vmware.api [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961790, 'name': PowerOnVM_Task, 'duration_secs': 0.567814} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.497677] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1016.498083] env[68217]: INFO nova.compute.manager [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Took 9.73 seconds to spawn the instance on the hypervisor. [ 1016.498374] env[68217]: DEBUG nova.compute.manager [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1016.498778] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': task-2961791, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.631567} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.499842] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe0b0b44-7ffd-4bdf-b649-6a37bd04f7da {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.502733] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 33802025-7f72-4ad9-80fe-b15196b1a577/33802025-7f72-4ad9-80fe-b15196b1a577.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1016.503119] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1016.503512] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c8bfbf14-cafc-4436-a683-3054b063e22a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.516537] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 7056fb29-2a2f-4275-a411-4d5f3fcb421f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1016.516888] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1016.517155] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance a86015ea-fa6b-4cf8-9d79-273ffa02ec23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1016.517371] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 3af571ce-c400-45a1-97ad-4fbd53395129 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1016.517580] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 04149a5c-d1b5-4d71-a1ca-44696506a40d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1016.517784] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 776798bf-1ad4-4acb-ac58-cacc5493e1c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1016.520020] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 105e6181-19c4-466b-88a0-cdbca2cac230 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1016.520020] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 35c4ab95-fc14-4bd4-a2a5-64f15f070b88 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1016.520020] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance d14026b1-84dd-430e-be94-94dcb1f47473 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1016.520020] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance e8ed78ff-94dd-42d3-8a4d-8e58dc788e55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1016.520020] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1016.520020] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance d1fcac61-0d2a-4331-9042-af11c3c36ae4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1016.520020] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 34f176e7-f98e-4eda-aee9-45e44d5ffb85 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1016.520020] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance df4c3a34-2dea-4f82-9ea6-7a9eb1c03179 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1016.520020] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 33802025-7f72-4ad9-80fe-b15196b1a577 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1016.520020] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance ffff4cf4-f663-4965-84d1-8351bfde1252 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1016.520020] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1016.520020] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3584MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1016.525863] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Waiting for the task: (returnval){ [ 1016.525863] env[68217]: value = "task-2961797" [ 1016.525863] env[68217]: _type = "Task" [ 1016.525863] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.537051] env[68217]: DEBUG nova.compute.manager [None req-a9ad1288-55e3-4206-a200-3a85e9fd29df tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Found 2 images (rotation: 2) {{(pid=68217) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1016.542937] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': task-2961797, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.588126] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961789, 'name': Rename_Task, 'duration_secs': 1.161229} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.590466] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1016.591324] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6945e98c-bf9c-4243-81b1-3b19d12bf7f5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.599082] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 1016.599082] env[68217]: value = "task-2961798" [ 1016.599082] env[68217]: _type = "Task" [ 1016.599082] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.614094] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961798, 'name': PowerOnVM_Task} progress is 33%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.639196] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1016.639196] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-78a82b35-c98b-4a21-95f3-00f7e03caadc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.645022] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1016.645022] env[68217]: value = "task-2961799" [ 1016.645022] env[68217]: _type = "Task" [ 1016.645022] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.651477] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961799, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.667060] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961792, 'name': CreateVM_Task, 'duration_secs': 0.876969} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.667060] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1016.667474] env[68217]: DEBUG oslo_concurrency.lockutils [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1016.667772] env[68217]: DEBUG oslo_concurrency.lockutils [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1016.668164] env[68217]: DEBUG oslo_concurrency.lockutils [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1016.670722] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09c7c0d6-b07f-438c-9b49-c64c081d71a5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.678018] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 1016.678018] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527b480c-1e59-4b21-fe66-3335118b5564" [ 1016.678018] env[68217]: _type = "Task" [ 1016.678018] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.683908] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527b480c-1e59-4b21-fe66-3335118b5564, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.782557] env[68217]: DEBUG oslo_vmware.api [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961796, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.797305] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e551b3a-b2b1-4454-926c-b9a2a0e412f9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.804720] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-000a37f1-e37f-4bf7-b09f-9ddd4324777f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.810266] env[68217]: DEBUG nova.network.neutron [req-ad274fac-d144-419c-a44c-79adaffe5691 req-824d734b-516b-4608-83e3-188b0e137c5f service nova] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Updated VIF entry in instance network info cache for port a112e6da-1e76-4618-b45e-229cbb5c0ebd. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1016.810266] env[68217]: DEBUG nova.network.neutron [req-ad274fac-d144-419c-a44c-79adaffe5691 req-824d734b-516b-4608-83e3-188b0e137c5f service nova] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Updating instance_info_cache with network_info: [{"id": "a112e6da-1e76-4618-b45e-229cbb5c0ebd", "address": "fa:16:3e:43:19:4b", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa112e6da-1e", "ovs_interfaceid": "a112e6da-1e76-4618-b45e-229cbb5c0ebd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1016.842249] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d73b78-7ee0-423f-beb7-4190af62866b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.850170] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84852feb-44ce-4333-b592-a8575cd8fa52 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.869564] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.934683] env[68217]: DEBUG nova.network.neutron [-] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Updating instance_info_cache with network_info: [] 
{{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.029071] env[68217]: INFO nova.compute.manager [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Took 32.71 seconds to build instance. [ 1017.042428] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': task-2961797, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065213} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.042988] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1017.044186] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-212f7569-c62d-4cd5-b9c2-0b012632d007 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.070487] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Reconfiguring VM instance instance-00000062 to attach disk [datastore1] 33802025-7f72-4ad9-80fe-b15196b1a577/33802025-7f72-4ad9-80fe-b15196b1a577.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1017.071844] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-77c4a345-38e9-4f42-a236-009677df2533 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.097020] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Waiting for the task: (returnval){ [ 1017.097020] env[68217]: value = "task-2961800" [ 1017.097020] env[68217]: _type = "Task" [ 1017.097020] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.107035] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': task-2961800, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.115487] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961798, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.153129] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961799, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.186414] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527b480c-1e59-4b21-fe66-3335118b5564, 'name': SearchDatastore_Task, 'duration_secs': 0.017261} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.187089] env[68217]: DEBUG oslo_concurrency.lockutils [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.187391] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1017.187669] env[68217]: DEBUG oslo_concurrency.lockutils [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1017.187786] env[68217]: DEBUG oslo_concurrency.lockutils [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1017.187967] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1017.188255] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-247c99dd-35b1-4a12-8965-9d065f00eafd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.198834] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1017.199028] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1017.199845] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94a9f25c-6743-4ab3-9489-871a54568755 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.206462] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 1017.206462] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5290871c-cedc-e062-5b40-70838c60f468" [ 1017.206462] env[68217]: _type = "Task" [ 1017.206462] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.214822] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5290871c-cedc-e062-5b40-70838c60f468, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.284025] env[68217]: DEBUG oslo_vmware.api [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961796, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.618458} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.284223] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1017.284401] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1017.284575] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1017.284745] env[68217]: INFO nova.compute.manager [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Took 1.63 seconds to destroy the instance on the hypervisor. 
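The records above and below follow oslo.vmware's usual task pattern: a vSphere method such as PowerOffVM_Task, DeleteDatastoreFile_Task or CopyVirtualDisk_Task is invoked through the API session (the "Invoking ... with opID=oslo.vmware-..." lines), the returned Task object is then polled by wait_for_task/_poll_task (the "Task: {...} progress is N%" lines), and the loop ends with "completed successfully". A minimal illustrative sketch of that pattern, not Nova's actual driver code; it assumes a reachable vCenter and an already-resolved VM managed object reference, and the host, credentials and vm_ref below are placeholders rather than values taken from this log:

    from oslo_vmware import api

    def power_off_vm(session, vm_ref):
        # Start the vSphere task; this is what produces the
        # "Invoking VirtualMachine.PowerOffVM_Task with opID=..." records.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        # Poll until the task finishes; this drives the
        # "Task: {...} progress is N%" / "completed successfully" records
        # and raises if vCenter reports an error or cancellation.
        session.wait_for_task(task)

    # Placeholder session setup; a real vCenter host and credentials are
    # required for this to connect.
    session = api.VMwareAPISession(
        'vcenter.example.com', 'administrator@vsphere.local', 'secret',
        api_retry_count=3, task_poll_interval=0.5)
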
[ 1017.284986] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1017.285207] env[68217]: DEBUG nova.compute.manager [-] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1017.285345] env[68217]: DEBUG nova.network.neutron [-] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1017.312935] env[68217]: DEBUG oslo_concurrency.lockutils [req-ad274fac-d144-419c-a44c-79adaffe5691 req-824d734b-516b-4608-83e3-188b0e137c5f service nova] Releasing lock "refresh_cache-ffff4cf4-f663-4965-84d1-8351bfde1252" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1017.373008] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1017.437539] env[68217]: INFO nova.compute.manager [-] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Took 1.86 seconds to deallocate network for instance. [ 1017.531634] env[68217]: DEBUG oslo_concurrency.lockutils [None req-388f51a5-8a7a-463b-b282-a2c2dcba0855 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "df4c3a34-2dea-4f82-9ea6-7a9eb1c03179" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.237s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.537245] env[68217]: DEBUG nova.compute.manager [req-fcf23d54-cb7f-4464-806e-d92b4eeefb71 req-ed474d80-0bd7-479a-9db0-c67283b24d1b service nova] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Received event network-vif-deleted-e30969ad-58d0-4513-8a14-98c33a9ff504 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1017.613019] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': task-2961800, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.619034] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961798, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.647341] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "d28bcf16-b081-4dc8-a975-2acaed222e15" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.648025] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "d28bcf16-b081-4dc8-a975-2acaed222e15" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.659311] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961799, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.717490] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5290871c-cedc-e062-5b40-70838c60f468, 'name': SearchDatastore_Task, 'duration_secs': 0.014829} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1017.719144] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-411d2944-1407-4505-9ec2-12fe7bad9a27 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.726423] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 1017.726423] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52181709-d4c1-0b77-052f-08e318c08825" [ 1017.726423] env[68217]: _type = "Task" [ 1017.726423] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.733759] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52181709-d4c1-0b77-052f-08e318c08825, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.805297] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Volume attach. 
Driver type: vmdk {{(pid=68217) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1017.805625] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594367', 'volume_id': '510a8087-136f-46d8-91cf-d7ca59a54445', 'name': 'volume-510a8087-136f-46d8-91cf-d7ca59a54445', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '776798bf-1ad4-4acb-ac58-cacc5493e1c7', 'attached_at': '', 'detached_at': '', 'volume_id': '510a8087-136f-46d8-91cf-d7ca59a54445', 'serial': '510a8087-136f-46d8-91cf-d7ca59a54445'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1017.806570] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bba883fc-2813-4f2e-9d05-e4e10d9913d3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.822904] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1ae6bb6-28df-4374-a49a-7f175af726c4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.825790] env[68217]: DEBUG nova.compute.manager [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1017.826553] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80061d05-71e7-4a90-9482-d7a5cc996443 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.853047] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Reconfiguring VM instance instance-00000057 to attach disk [datastore1] volume-510a8087-136f-46d8-91cf-d7ca59a54445/volume-510a8087-136f-46d8-91cf-d7ca59a54445.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1017.855024] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe8dfac0-fd91-4be5-8b06-0bbb9862026f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.875686] env[68217]: DEBUG oslo_vmware.api [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1017.875686] env[68217]: value = "task-2961801" [ 1017.875686] env[68217]: _type = "Task" [ 1017.875686] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1017.881200] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68217) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1017.881382] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.424s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1017.886685] env[68217]: DEBUG oslo_vmware.api [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961801, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1017.946042] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1017.946351] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1017.946575] env[68217]: DEBUG nova.objects.instance [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Lazy-loading 'resources' on Instance uuid d1fcac61-0d2a-4331-9042-af11c3c36ae4 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1018.007455] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f8f6b3ed-e813-40ae-ba65-8231a98fb5f5 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "df4c3a34-2dea-4f82-9ea6-7a9eb1c03179" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1018.007703] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f8f6b3ed-e813-40ae-ba65-8231a98fb5f5 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "df4c3a34-2dea-4f82-9ea6-7a9eb1c03179" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1018.007952] env[68217]: DEBUG nova.compute.manager [None req-f8f6b3ed-e813-40ae-ba65-8231a98fb5f5 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Checking state {{(pid=68217) _get_power_state 
/opt/stack/nova/nova/compute/manager.py:1798}} [ 1018.008885] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d88204e-760e-4afd-81cb-53444bd2a8d2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.015827] env[68217]: DEBUG nova.compute.manager [None req-f8f6b3ed-e813-40ae-ba65-8231a98fb5f5 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68217) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1018.016387] env[68217]: DEBUG nova.objects.instance [None req-f8f6b3ed-e813-40ae-ba65-8231a98fb5f5 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lazy-loading 'flavor' on Instance uuid df4c3a34-2dea-4f82-9ea6-7a9eb1c03179 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1018.106260] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': task-2961800, 'name': ReconfigVM_Task, 'duration_secs': 0.754682} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.106566] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Reconfigured VM instance instance-00000062 to attach disk [datastore1] 33802025-7f72-4ad9-80fe-b15196b1a577/33802025-7f72-4ad9-80fe-b15196b1a577.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1018.109984] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4e0923a9-c078-4bcd-a4c5-f5c57e84543a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.116965] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961798, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.117830] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Waiting for the task: (returnval){ [ 1018.117830] env[68217]: value = "task-2961802" [ 1018.117830] env[68217]: _type = "Task" [ 1018.117830] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.125482] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': task-2961802, 'name': Rename_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.154294] env[68217]: DEBUG nova.compute.manager [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1018.156971] env[68217]: DEBUG nova.network.neutron [-] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1018.163507] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961799, 'name': PowerOffVM_Task, 'duration_secs': 1.166717} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.163507] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1018.163886] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c098884-4cb2-4675-af9d-b600abd9314e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.183755] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7a782d0-61e4-4e70-a1b5-8c6dd57362d7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.221542] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1018.221837] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3123c622-7015-48f7-8e14-d48de866e5f2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.231113] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1018.231113] env[68217]: value = "task-2961803" [ 1018.231113] env[68217]: _type = "Task" [ 1018.231113] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.241223] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52181709-d4c1-0b77-052f-08e318c08825, 'name': SearchDatastore_Task, 'duration_secs': 0.010759} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.242096] env[68217]: DEBUG oslo_concurrency.lockutils [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.242492] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] ffff4cf4-f663-4965-84d1-8351bfde1252/ffff4cf4-f663-4965-84d1-8351bfde1252.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1018.242882] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8f2b1ad1-88fe-445d-86e2-67f3667de8d2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.250407] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] VM already powered off {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1018.250767] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1018.251293] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.251554] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1018.253127] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1018.253127] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-795b05d1-7fcc-4a0e-b278-f1ed899e214e 
{{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.255200] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 1018.255200] env[68217]: value = "task-2961804" [ 1018.255200] env[68217]: _type = "Task" [ 1018.255200] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.264128] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961804, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.265490] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1018.265686] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1018.266504] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3eeaf813-2eac-45aa-8b73-ac3c0088fd11 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.271877] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1018.271877] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f02522-16ae-f8c5-35d4-2c93de9c59bb" [ 1018.271877] env[68217]: _type = "Task" [ 1018.271877] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.281726] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f02522-16ae-f8c5-35d4-2c93de9c59bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.371985] env[68217]: INFO nova.compute.manager [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] instance snapshotting [ 1018.372643] env[68217]: DEBUG nova.objects.instance [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lazy-loading 'flavor' on Instance uuid 04149a5c-d1b5-4d71-a1ca-44696506a40d {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1018.385187] env[68217]: DEBUG oslo_vmware.api [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961801, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.621654] env[68217]: DEBUG oslo_vmware.api [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961798, 'name': PowerOnVM_Task, 'duration_secs': 1.684051} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.625550] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1018.626097] env[68217]: INFO nova.compute.manager [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Took 14.26 seconds to spawn the instance on the hypervisor. [ 1018.626355] env[68217]: DEBUG nova.compute.manager [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1018.627292] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82739471-6965-41db-887d-4629071b3c53 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.640253] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': task-2961802, 'name': Rename_Task, 'duration_secs': 0.199952} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.642802] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1018.645733] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fc4dca2b-0f15-4a14-ac1a-bc995b6cda02 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.654860] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Waiting for the task: (returnval){ [ 1018.654860] env[68217]: value = "task-2961805" [ 1018.654860] env[68217]: _type = "Task" [ 1018.654860] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.665341] env[68217]: INFO nova.compute.manager [-] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Took 1.38 seconds to deallocate network for instance. [ 1018.670065] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': task-2961805, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.677431] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1018.740938] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf997cad-9882-4621-a9dd-3ff2e27e94fc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.753898] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-427691d7-17c4-4b81-b119-9b13b0b4740e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.768828] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961804, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.802645] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db1045cc-530a-4c92-9e01-a1db443c3874 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.811865] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f02522-16ae-f8c5-35d4-2c93de9c59bb, 'name': SearchDatastore_Task, 'duration_secs': 0.011899} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.815269] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9fc495d2-583e-4db3-936b-5c99b09c544b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.818974] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-365827a7-a06e-484e-b966-1cfb8cbb0333 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.828149] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1018.828149] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52960172-b5b0-001a-69d9-088125443f42" [ 1018.828149] env[68217]: _type = "Task" [ 1018.828149] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.836827] env[68217]: DEBUG nova.compute.provider_tree [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1018.848514] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52960172-b5b0-001a-69d9-088125443f42, 'name': SearchDatastore_Task, 'duration_secs': 0.018923} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.849532] env[68217]: DEBUG oslo_concurrency.lockutils [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1018.849832] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] e8ed78ff-94dd-42d3-8a4d-8e58dc788e55/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk. {{(pid=68217) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1018.850187] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-74f8fe20-5ce5-4836-aa51-b88cac772958 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.859940] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1018.859940] env[68217]: value = "task-2961806" [ 1018.859940] env[68217]: _type = "Task" [ 1018.859940] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.871646] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961806, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1018.881657] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12cb7d9b-b544-489f-85a8-b4899bb5fab3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.889693] env[68217]: DEBUG oslo_vmware.api [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961801, 'name': ReconfigVM_Task, 'duration_secs': 0.787785} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1018.903111] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Reconfigured VM instance instance-00000057 to attach disk [datastore1] volume-510a8087-136f-46d8-91cf-d7ca59a54445/volume-510a8087-136f-46d8-91cf-d7ca59a54445.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1018.908444] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-97b1f401-43e3-48cd-b630-8c17be704a97 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.919227] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43efb207-d6d6-44e1-967f-ed9e6936fd25 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.931525] env[68217]: DEBUG oslo_vmware.api [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1018.931525] env[68217]: value = "task-2961807" [ 1018.931525] env[68217]: _type = "Task" [ 1018.931525] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1018.942703] env[68217]: DEBUG oslo_vmware.api [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961807, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.023489] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f6b3ed-e813-40ae-ba65-8231a98fb5f5 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1019.024046] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5883c154-dee8-4b9d-892d-3c5d16df6f3e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.031436] env[68217]: DEBUG oslo_vmware.api [None req-f8f6b3ed-e813-40ae-ba65-8231a98fb5f5 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1019.031436] env[68217]: value = "task-2961808" [ 1019.031436] env[68217]: _type = "Task" [ 1019.031436] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.039806] env[68217]: DEBUG oslo_vmware.api [None req-f8f6b3ed-e813-40ae-ba65-8231a98fb5f5 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961808, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.162400] env[68217]: INFO nova.compute.manager [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Took 37.44 seconds to build instance. [ 1019.167133] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': task-2961805, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.179785] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.269426] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961804, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644958} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.269684] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] ffff4cf4-f663-4965-84d1-8351bfde1252/ffff4cf4-f663-4965-84d1-8351bfde1252.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1019.269899] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1019.270184] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e6225100-d081-4705-a2a4-a63d6ee5fc0a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.280283] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 1019.280283] env[68217]: value = "task-2961809" [ 1019.280283] env[68217]: _type = "Task" [ 1019.280283] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.291839] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961809, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.343591] env[68217]: DEBUG nova.scheduler.client.report [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1019.373338] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961806, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.434566] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1019.435240] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-372b76d7-d1e7-48f3-a405-94cbbfed944e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.447356] env[68217]: DEBUG oslo_vmware.api [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961807, 'name': ReconfigVM_Task, 'duration_secs': 0.214842} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.448876] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594367', 'volume_id': '510a8087-136f-46d8-91cf-d7ca59a54445', 'name': 'volume-510a8087-136f-46d8-91cf-d7ca59a54445', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '776798bf-1ad4-4acb-ac58-cacc5493e1c7', 'attached_at': '', 'detached_at': '', 'volume_id': '510a8087-136f-46d8-91cf-d7ca59a54445', 'serial': '510a8087-136f-46d8-91cf-d7ca59a54445'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1019.450481] env[68217]: DEBUG oslo_vmware.api [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1019.450481] env[68217]: value = "task-2961810" [ 1019.450481] env[68217]: _type = "Task" [ 1019.450481] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.458612] env[68217]: DEBUG oslo_vmware.api [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961810, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.513818] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquiring lock "34f176e7-f98e-4eda-aee9-45e44d5ffb85" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.541072] env[68217]: DEBUG oslo_vmware.api [None req-f8f6b3ed-e813-40ae-ba65-8231a98fb5f5 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961808, 'name': PowerOffVM_Task, 'duration_secs': 0.414114} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.541361] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f6b3ed-e813-40ae-ba65-8231a98fb5f5 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1019.541575] env[68217]: DEBUG nova.compute.manager [None req-f8f6b3ed-e813-40ae-ba65-8231a98fb5f5 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1019.542575] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0074410-957a-42a8-8226-46a8721f2bd1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.565000] env[68217]: DEBUG nova.compute.manager [req-c1beca25-9423-49df-b1c7-b392502262c6 req-c124e578-833a-4418-8d6a-545db178cea0 service nova] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Received event network-vif-deleted-1b0e0705-6a81-45e3-b5b0-832547fed562 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1019.667711] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1525b7f-5547-4044-accd-b829a5677dd9 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "34f176e7-f98e-4eda-aee9-45e44d5ffb85" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 38.959s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.667711] env[68217]: DEBUG oslo_vmware.api [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': task-2961805, 'name': PowerOnVM_Task, 'duration_secs': 0.862949} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.668100] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "34f176e7-f98e-4eda-aee9-45e44d5ffb85" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.154s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.668100] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquiring lock "34f176e7-f98e-4eda-aee9-45e44d5ffb85-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1019.668270] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "34f176e7-f98e-4eda-aee9-45e44d5ffb85-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.668435] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "34f176e7-f98e-4eda-aee9-45e44d5ffb85-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.669957] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1019.670182] env[68217]: INFO nova.compute.manager [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Took 10.46 seconds to spawn the instance on the hypervisor. 
[ 1019.670361] env[68217]: DEBUG nova.compute.manager [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1019.670829] env[68217]: INFO nova.compute.manager [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Terminating instance [ 1019.672488] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30614ec7-5bb8-4614-8dbb-00cc971322fb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.790904] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961809, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.129989} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.791436] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1019.792262] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-281330c4-fc3f-4b66-a4a2-8fbbdf33f4b1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.814942] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] ffff4cf4-f663-4965-84d1-8351bfde1252/ffff4cf4-f663-4965-84d1-8351bfde1252.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1019.815232] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e55f4e35-bb20-4dfa-ad9f-04c196288d79 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.835255] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 1019.835255] env[68217]: value = "task-2961811" [ 1019.835255] env[68217]: _type = "Task" [ 1019.835255] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.843248] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961811, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.848439] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.902s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1019.850518] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.174s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1019.852041] env[68217]: INFO nova.compute.claims [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1019.871181] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961806, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.612039} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1019.871589] env[68217]: INFO nova.virt.vmwareapi.ds_util [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] e8ed78ff-94dd-42d3-8a4d-8e58dc788e55/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk. 
[ 1019.872635] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec60eb65-8d52-40a4-a775-6ac04d8fc8ba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.876633] env[68217]: INFO nova.scheduler.client.report [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Deleted allocations for instance d1fcac61-0d2a-4331-9042-af11c3c36ae4 [ 1019.898160] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Reconfiguring VM instance instance-0000005c to attach disk [datastore2] e8ed78ff-94dd-42d3-8a4d-8e58dc788e55/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1019.901145] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2765f251-7fa0-40ce-b350-16d28b4fc68f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1019.919446] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1019.919446] env[68217]: value = "task-2961812" [ 1019.919446] env[68217]: _type = "Task" [ 1019.919446] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1019.927570] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961812, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1019.962664] env[68217]: DEBUG oslo_vmware.api [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961810, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.053727] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f8f6b3ed-e813-40ae-ba65-8231a98fb5f5 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "df4c3a34-2dea-4f82-9ea6-7a9eb1c03179" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.046s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.177975] env[68217]: DEBUG nova.compute.manager [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1020.178091] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1020.179048] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-543b63bc-5287-471a-9995-aab6ef554f61 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.190203] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1020.193951] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5673aec9-20c8-4e51-86a3-fee3953d4a0b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.196606] env[68217]: INFO nova.compute.manager [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Took 33.01 seconds to build instance. [ 1020.202564] env[68217]: DEBUG oslo_vmware.api [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 1020.202564] env[68217]: value = "task-2961813" [ 1020.202564] env[68217]: _type = "Task" [ 1020.202564] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.214105] env[68217]: DEBUG oslo_vmware.api [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961813, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.345415] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961811, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.417238] env[68217]: DEBUG oslo_concurrency.lockutils [None req-62671cc2-8d64-49e3-a6b7-0270890d449d tempest-AttachInterfacesV270Test-1918469395 tempest-AttachInterfacesV270Test-1918469395-project-member] Lock "d1fcac61-0d2a-4331-9042-af11c3c36ae4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.470s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.430062] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961812, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.464207] env[68217]: DEBUG oslo_vmware.api [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961810, 'name': CreateSnapshot_Task, 'duration_secs': 0.979138} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.464651] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1020.465470] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed2f32b0-4aa0-4700-a49c-50e58b19f0f1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.493704] env[68217]: DEBUG nova.objects.instance [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lazy-loading 'flavor' on Instance uuid 776798bf-1ad4-4acb-ac58-cacc5493e1c7 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1020.703460] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c6cb1f42-13e1-47c5-a788-521898988623 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Lock "33802025-7f72-4ad9-80fe-b15196b1a577" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 34.524s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1020.705538] env[68217]: INFO nova.compute.manager [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Rebuilding instance [ 1020.721670] env[68217]: DEBUG oslo_vmware.api [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961813, 'name': PowerOffVM_Task, 'duration_secs': 0.248862} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.721968] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1020.722167] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1020.722443] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e8da6209-811f-4886-852a-9446be4773c2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.763623] env[68217]: DEBUG nova.compute.manager [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1020.763623] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9bc5a35-af00-441a-aca2-8ae9679737e3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.822046] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1020.822046] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1020.822046] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Deleting the datastore file [datastore1] 34f176e7-f98e-4eda-aee9-45e44d5ffb85 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1020.822046] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-54843607-1068-41b9-9410-a3ca23c2beb6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.828548] env[68217]: DEBUG oslo_vmware.api [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for the task: (returnval){ [ 1020.828548] env[68217]: value = "task-2961815" [ 1020.828548] env[68217]: _type = "Task" [ 1020.828548] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.837423] env[68217]: DEBUG oslo_vmware.api [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961815, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.847029] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961811, 'name': ReconfigVM_Task, 'duration_secs': 0.691049} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.847490] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Reconfigured VM instance instance-00000063 to attach disk [datastore1] ffff4cf4-f663-4965-84d1-8351bfde1252/ffff4cf4-f663-4965-84d1-8351bfde1252.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1020.848159] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7540565c-67a8-4ceb-aedb-9e49a4a12111 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.855999] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 1020.855999] env[68217]: value = "task-2961816" [ 1020.855999] env[68217]: _type = "Task" [ 1020.855999] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.866961] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961816, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1020.933792] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961812, 'name': ReconfigVM_Task, 'duration_secs': 0.638565} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1020.934335] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Reconfigured VM instance instance-0000005c to attach disk [datastore2] e8ed78ff-94dd-42d3-8a4d-8e58dc788e55/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1020.935528] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15921b1e-cc94-4391-9db5-9f769efa34e5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.970890] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-60cedbe6-6eac-4dc5-b161-b254c96359bb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.988963] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1020.988963] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-1855321d-dc66-43b9-8939-53f29451deba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.999143] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1020.999143] env[68217]: value = "task-2961817" [ 1020.999143] env[68217]: _type = "Task" [ 1020.999143] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.002574] env[68217]: DEBUG oslo_concurrency.lockutils [None req-501b6af3-d4c9-4ab9-b86d-b54509faf1e0 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.843s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.004441] env[68217]: DEBUG oslo_vmware.api [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1021.004441] env[68217]: value = "task-2961818" [ 1021.004441] env[68217]: _type = "Task" [ 1021.004441] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.017195] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961817, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.020574] env[68217]: DEBUG oslo_vmware.api [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961818, 'name': CloneVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.150917] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4faf86c-13eb-4e10-81a9-47f994760603 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.164096] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba20bd47-cf5c-4216-a6a9-a76cf915aed9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.205206] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81f71625-8287-49d0-89c8-6b7ef200e841 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.213829] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3b266c2-9483-4bdf-8336-5516ef9e1c12 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.231435] env[68217]: DEBUG nova.compute.provider_tree [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1021.341648] env[68217]: DEBUG oslo_vmware.api [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Task: {'id': task-2961815, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.19194} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.341960] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1021.342379] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1021.342759] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1021.342759] env[68217]: INFO nova.compute.manager [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1021.343148] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1021.343365] env[68217]: DEBUG nova.compute.manager [-] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1021.343465] env[68217]: DEBUG nova.network.neutron [-] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1021.365979] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961816, 'name': Rename_Task, 'duration_secs': 0.140186} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.366393] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1021.366706] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-13aa4eeb-8c44-484a-a906-3c9ee1472b67 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.373786] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 1021.373786] env[68217]: value = "task-2961819" [ 1021.373786] env[68217]: _type = "Task" [ 1021.373786] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.381940] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961819, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.517714] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961817, 'name': ReconfigVM_Task, 'duration_secs': 0.169318} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.521965] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1021.522246] env[68217]: DEBUG oslo_vmware.api [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961818, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.525234] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3bf5fdb9-349b-488f-b714-fa2a3f660734 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.534497] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1021.534497] env[68217]: value = "task-2961820" [ 1021.534497] env[68217]: _type = "Task" [ 1021.534497] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.545867] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961820, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.734687] env[68217]: DEBUG nova.scheduler.client.report [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1021.776074] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1021.776579] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c308b444-19a9-441c-971c-d2d06bbff03d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.784921] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1021.784921] env[68217]: value = "task-2961821" [ 1021.784921] env[68217]: _type = "Task" [ 1021.784921] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.796204] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] VM already powered off {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1021.796498] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1021.797336] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4a94eb6-4908-48e9-9f75-1c851b5d86b5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.806399] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1021.807032] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-645995bf-fd3d-42ba-bf4c-8d50818a1549 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.821954] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Acquiring lock "33802025-7f72-4ad9-80fe-b15196b1a577" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.822303] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Lock "33802025-7f72-4ad9-80fe-b15196b1a577" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.822523] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Acquiring lock "33802025-7f72-4ad9-80fe-b15196b1a577-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1021.822704] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Lock "33802025-7f72-4ad9-80fe-b15196b1a577-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1021.822869] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Lock "33802025-7f72-4ad9-80fe-b15196b1a577-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1021.824959] env[68217]: INFO nova.compute.manager [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Terminating instance [ 1021.868035] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1021.868486] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1021.868582] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleting the datastore file [datastore2] df4c3a34-2dea-4f82-9ea6-7a9eb1c03179 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1021.868864] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-062ee352-565e-4c8f-9f65-3f83131c03c3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.876386] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1021.876386] env[68217]: value = "task-2961823" [ 1021.876386] env[68217]: _type = "Task" [ 1021.876386] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1021.891175] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961823, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.895836] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961819, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.018939] env[68217]: DEBUG oslo_vmware.api [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961818, 'name': CloneVM_Task} progress is 95%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.051972] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "1faf45fb-a3b0-4647-b63d-3f51695b6171" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.052271] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "1faf45fb-a3b0-4647-b63d-3f51695b6171" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.059743] env[68217]: DEBUG oslo_vmware.api [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961820, 'name': PowerOnVM_Task, 'duration_secs': 0.378889} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.059994] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1022.062636] env[68217]: DEBUG nova.compute.manager [None req-24c32c38-c27d-418e-8df8-8948937332b2 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1022.063536] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a88223a4-e300-4564-af84-806770a73b69 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.244370] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.394s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1022.245046] env[68217]: DEBUG nova.compute.manager [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1022.248240] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.068s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.248606] env[68217]: DEBUG nova.objects.instance [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lazy-loading 'resources' on Instance uuid 105e6181-19c4-466b-88a0-cdbca2cac230 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1022.255917] env[68217]: DEBUG nova.compute.manager [req-e25bf4b8-f9e6-48c8-9f29-439c5b95ded2 req-0d91f93b-fbf8-46e6-9992-8319f471a513 service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Received event network-vif-deleted-a9ab050e-fd0e-469a-8c0a-a9794739f06b {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1022.256178] env[68217]: INFO nova.compute.manager [req-e25bf4b8-f9e6-48c8-9f29-439c5b95ded2 req-0d91f93b-fbf8-46e6-9992-8319f471a513 service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Neutron deleted interface a9ab050e-fd0e-469a-8c0a-a9794739f06b; detaching it from the instance and deleting it from the info cache [ 1022.256511] env[68217]: DEBUG nova.network.neutron [req-e25bf4b8-f9e6-48c8-9f29-439c5b95ded2 req-0d91f93b-fbf8-46e6-9992-8319f471a513 service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Updating instance_info_cache with network_info: [{"id": "79c6bcfe-54df-47ec-a39b-84e3bba24e55", "address": "fa:16:3e:4a:e2:54", "network": {"id": "0149085c-6f20-47db-87ac-aa2336e92240", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1636213118", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.29", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "905b15e740ad4f879ba61518ba400680", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c6365036-aa37-44d2-90d1-ca1c3516ded9", "external-id": "nsx-vlan-transportzone-66", "segmentation_id": 66, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap79c6bcfe-54", "ovs_interfaceid": "79c6bcfe-54df-47ec-a39b-84e3bba24e55", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1022.330963] env[68217]: DEBUG nova.compute.manager [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1022.331212] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1022.332666] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a1be585-2ff9-4fd9-8544-1a9080351d1b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.346766] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1022.347100] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-5d029d62-6632-4d7e-9fba-dcbfcceee483 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.353900] env[68217]: DEBUG oslo_vmware.api [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Waiting for the task: (returnval){ [ 1022.353900] env[68217]: value = "task-2961824" [ 1022.353900] env[68217]: _type = "Task" [ 1022.353900] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.364531] env[68217]: DEBUG oslo_vmware.api [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': task-2961824, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.387612] env[68217]: DEBUG oslo_vmware.api [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961819, 'name': PowerOnVM_Task, 'duration_secs': 0.715625} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.388482] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1022.388737] env[68217]: INFO nova.compute.manager [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Took 8.93 seconds to spawn the instance on the hypervisor. 
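The entries above show the vmwareapi driver waiting on vCenter tasks (Rename_Task, PowerOnVM_Task, CloneVM_Task): oslo.vmware's wait_for_task keeps invoking _poll_task, logging the task's progress percentage until the task completes and a duration_secs value is recorded. The sketch below is a minimal, self-contained analogue of that polling loop; poll_task, TaskInfo and the fetch_task_info callback are hypothetical stand-ins for illustration, not the oslo.vmware API.

```python
# Illustrative sketch only: mirrors the "progress is N%." /
# "completed successfully" messages in the log above. poll_task(),
# TaskInfo and fetch_task_info() are hypothetical stand-ins,
# not the oslo.vmware implementation.
import time
from dataclasses import dataclass


@dataclass
class TaskInfo:
    """Snapshot of a vCenter-style task: state, progress %, error text."""
    state: str          # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0
    error: str = ""


def poll_task(fetch_task_info, task_id, interval=0.5, timeout=300.0):
    """Poll a task until it succeeds, fails, or times out.

    fetch_task_info(task_id) -> TaskInfo is supplied by the caller.
    Returns the elapsed time in seconds, analogous to the duration_secs
    field logged when a task finishes.
    """
    start = time.monotonic()
    while True:
        info = fetch_task_info(task_id)
        if info.state == "success":
            return time.monotonic() - start
        if info.state == "error":
            raise RuntimeError(f"Task {task_id} failed: {info.error}")
        if time.monotonic() - start > timeout:
            raise TimeoutError(f"Task {task_id} timed out")
        print(f"Task {task_id} progress is {info.progress}%.")
        time.sleep(interval)
```

In the log this loop shows up as repeated "progress is N%." lines for a given task id, followed by a final "completed successfully" entry carrying the measured duration_secs.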
[ 1022.388945] env[68217]: DEBUG nova.compute.manager [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1022.389825] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33f7479f-7fff-40cc-b254-c48444fd416b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.396946] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961823, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231336} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.396946] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1022.396946] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1022.396946] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1022.520567] env[68217]: DEBUG oslo_vmware.api [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961818, 'name': CloneVM_Task, 'duration_secs': 1.318866} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.520893] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Created linked-clone VM from snapshot [ 1022.521712] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88535482-5050-4067-9584-02504b446bc8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.530316] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Uploading image 6e605951-4bf8-4242-b4cf-a140328995ff {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1022.555432] env[68217]: DEBUG oslo_vmware.rw_handles [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1022.555432] env[68217]: value = "vm-594370" [ 1022.555432] env[68217]: _type = "VirtualMachine" [ 1022.555432] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1022.555819] env[68217]: DEBUG nova.compute.manager [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1022.558416] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-048dbd77-d5c2-4213-bab2-012bf977fa6a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.566723] env[68217]: DEBUG oslo_vmware.rw_handles [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lease: (returnval){ [ 1022.566723] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52237955-b34c-42f5-174f-2151ceff2708" [ 1022.566723] env[68217]: _type = "HttpNfcLease" [ 1022.566723] env[68217]: } obtained for exporting VM: (result){ [ 1022.566723] env[68217]: value = "vm-594370" [ 1022.566723] env[68217]: _type = "VirtualMachine" [ 1022.566723] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1022.566993] env[68217]: DEBUG oslo_vmware.api [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the lease: (returnval){ [ 1022.566993] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52237955-b34c-42f5-174f-2151ceff2708" [ 1022.566993] env[68217]: _type = "HttpNfcLease" [ 1022.566993] env[68217]: } to be ready. 
{{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1022.573631] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1022.573631] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52237955-b34c-42f5-174f-2151ceff2708" [ 1022.573631] env[68217]: _type = "HttpNfcLease" [ 1022.573631] env[68217]: } is initializing. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1022.758569] env[68217]: DEBUG nova.compute.utils [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1022.761161] env[68217]: DEBUG nova.compute.manager [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1022.761648] env[68217]: DEBUG nova.network.neutron [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1022.764635] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9592bfc8-5ff4-449b-922c-ae4f901fb161 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.777252] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-283285d7-cfb0-4712-b315-e755abd86094 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.796985] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9a7aa0d3-c493-4214-8982-7d50ca130889 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "interface-35c4ab95-fc14-4bd4-a2a5-64f15f070b88-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1022.797159] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9a7aa0d3-c493-4214-8982-7d50ca130889 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-35c4ab95-fc14-4bd4-a2a5-64f15f070b88-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1022.797466] env[68217]: DEBUG nova.objects.instance [None req-9a7aa0d3-c493-4214-8982-7d50ca130889 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lazy-loading 'flavor' on Instance uuid 35c4ab95-fc14-4bd4-a2a5-64f15f070b88 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1022.821899] env[68217]: DEBUG nova.compute.manager [req-e25bf4b8-f9e6-48c8-9f29-439c5b95ded2 req-0d91f93b-fbf8-46e6-9992-8319f471a513 service nova] [instance: 
34f176e7-f98e-4eda-aee9-45e44d5ffb85] Detach interface failed, port_id=a9ab050e-fd0e-469a-8c0a-a9794739f06b, reason: Instance 34f176e7-f98e-4eda-aee9-45e44d5ffb85 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1022.841967] env[68217]: DEBUG nova.policy [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c867c8ebcaeb49ec91f751e2be5349b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '46f4c8c2f4764bd1b995396126b6aaf3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1022.867077] env[68217]: DEBUG oslo_vmware.api [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': task-2961824, 'name': PowerOffVM_Task, 'duration_secs': 0.319961} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1022.869896] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1022.870095] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1022.870536] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-77ee2bd1-ae9c-4470-8979-fc21702e5b11 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.918617] env[68217]: INFO nova.compute.manager [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Took 22.73 seconds to build instance. 
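Throughout this section oslo_concurrency.lockutils reports how long each caller waited to acquire a named lock and how long it was held (for example the "compute_resources" lock held 2.394s, or the per-instance UUID locks taken by do_terminate_instance). The context manager below is a small, self-contained analogue of that instrumentation, shown only to make those messages easier to read; it is an illustrative sketch under that assumption, not the lockutils implementation, and the names are arbitrary.

```python
# Illustrative analogue of the 'acquired ... waited Xs' / '"released" ... held Ys'
# messages above; not the oslo_concurrency.lockutils implementation.
import contextlib
import logging
import threading
import time

LOG = logging.getLogger(__name__)
_locks: dict[str, threading.Lock] = {}
_registry_guard = threading.Lock()


@contextlib.contextmanager
def timed_lock(name: str, caller: str):
    """Acquire a named lock, logging wait and hold times."""
    with _registry_guard:
        lock = _locks.setdefault(name, threading.Lock())
    LOG.debug('Acquiring lock "%s" by "%s"', name, caller)
    t0 = time.monotonic()
    lock.acquire()
    t1 = time.monotonic()
    LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs', name, caller, t1 - t0)
    try:
        yield
    finally:
        lock.release()
        LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs',
                  name, caller, time.monotonic() - t1)


# Usage, mirroring the compute_resources lock seen in the log:
# with timed_lock("compute_resources",
#                 "nova.compute.resource_tracker.ResourceTracker.instance_claim"):
#     ...claim resources for the instance...
```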
[ 1022.930374] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1022.930652] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1022.930861] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Deleting the datastore file [datastore1] 33802025-7f72-4ad9-80fe-b15196b1a577 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1022.931465] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-53e54397-c180-4f45-a8ab-916420caf415 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.942462] env[68217]: DEBUG oslo_vmware.api [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Waiting for the task: (returnval){ [ 1022.942462] env[68217]: value = "task-2961827" [ 1022.942462] env[68217]: _type = "Task" [ 1022.942462] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.954428] env[68217]: DEBUG oslo_vmware.api [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': task-2961827, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.965640] env[68217]: DEBUG nova.network.neutron [-] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.074784] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1023.074784] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52237955-b34c-42f5-174f-2151ceff2708" [ 1023.074784] env[68217]: _type = "HttpNfcLease" [ 1023.074784] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1023.075107] env[68217]: DEBUG oslo_vmware.rw_handles [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1023.075107] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52237955-b34c-42f5-174f-2151ceff2708" [ 1023.075107] env[68217]: _type = "HttpNfcLease" [ 1023.075107] env[68217]: }. 
{{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1023.075813] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e92a3853-d6b2-41f9-8fd2-76c43f14d65a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.083778] env[68217]: DEBUG oslo_vmware.rw_handles [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c6f688-1191-8ada-0af1-ce794aca4922/disk-0.vmdk from lease info. {{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1023.083961] env[68217]: DEBUG oslo_vmware.rw_handles [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c6f688-1191-8ada-0af1-ce794aca4922/disk-0.vmdk for reading. {{(pid=68217) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1023.086064] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1023.156396] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6bf84bd-b906-4a23-8107-2221ac1afff7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.163982] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4a5ef4-99d0-48ea-a150-de002fd44278 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.199887] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bc81acd-2255-4480-a9c7-cff418c12256 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.203474] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-bc7a0ded-ba11-4fd6-9ffc-2339ff0e598f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.210761] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b3dba3-6af5-4ff4-a60c-02014003ebf6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.223913] env[68217]: DEBUG nova.compute.provider_tree [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1023.265443] env[68217]: DEBUG nova.compute.manager [None req-74028850-4ce6-4d31-9aed-e7acab255731 
tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1023.269200] env[68217]: DEBUG nova.network.neutron [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Successfully created port: 7ca41605-8ab9-4d01-835b-70d47e78fce9 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1023.301671] env[68217]: DEBUG nova.objects.instance [None req-9a7aa0d3-c493-4214-8982-7d50ca130889 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lazy-loading 'pci_requests' on Instance uuid 35c4ab95-fc14-4bd4-a2a5-64f15f070b88 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1023.421470] env[68217]: DEBUG oslo_concurrency.lockutils [None req-da299dd1-74d2-4367-8058-b9e76b09d0b7 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "ffff4cf4-f663-4965-84d1-8351bfde1252" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.239s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.443787] env[68217]: DEBUG nova.virt.hardware [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1023.444029] env[68217]: DEBUG nova.virt.hardware [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1023.444164] env[68217]: DEBUG nova.virt.hardware [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1023.444352] env[68217]: DEBUG nova.virt.hardware [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1023.444500] env[68217]: DEBUG 
nova.virt.hardware [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1023.444645] env[68217]: DEBUG nova.virt.hardware [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1023.444850] env[68217]: DEBUG nova.virt.hardware [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1023.445076] env[68217]: DEBUG nova.virt.hardware [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1023.445267] env[68217]: DEBUG nova.virt.hardware [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1023.445433] env[68217]: DEBUG nova.virt.hardware [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1023.445605] env[68217]: DEBUG nova.virt.hardware [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1023.446562] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c0417a6-ea8a-4278-b678-1e6080473d76 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.461810] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a61fbdd0-728a-4090-84ab-82f40944fa4f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.468036] env[68217]: DEBUG oslo_vmware.api [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Task: {'id': task-2961827, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143166} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.468448] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1023.468675] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1023.468947] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1023.469145] env[68217]: INFO nova.compute.manager [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1023.469516] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1023.470086] env[68217]: DEBUG nova.compute.manager [-] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1023.470215] env[68217]: DEBUG nova.network.neutron [-] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1023.471948] env[68217]: INFO nova.compute.manager [-] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Took 2.13 seconds to deallocate network for instance. [ 1023.482045] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:64:1c:67', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b5d29b01-b9b6-4d25-9fef-4a335cf05875', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1023.490028] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1023.494021] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1023.494424] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-77e4829c-2c3f-4e6f-8303-0b4e64e6d0f6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.517541] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1023.517541] env[68217]: value = "task-2961828" [ 1023.517541] env[68217]: _type = "Task" [ 1023.517541] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.528505] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961828, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.729848] env[68217]: DEBUG nova.scheduler.client.report [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1023.810040] env[68217]: DEBUG nova.objects.base [None req-9a7aa0d3-c493-4214-8982-7d50ca130889 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Object Instance<35c4ab95-fc14-4bd4-a2a5-64f15f070b88> lazy-loaded attributes: flavor,pci_requests {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1023.810350] env[68217]: DEBUG nova.network.neutron [None req-9a7aa0d3-c493-4214-8982-7d50ca130889 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1023.954013] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9a7aa0d3-c493-4214-8982-7d50ca130889 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-35c4ab95-fc14-4bd4-a2a5-64f15f070b88-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.156s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1023.996061] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1024.028456] env[68217]: DEBUG oslo_vmware.api [-] Task: 
{'id': task-2961828, 'name': CreateVM_Task, 'duration_secs': 0.423053} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.028969] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1024.029866] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.030220] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.030954] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1024.031359] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2285ca2e-6811-4f92-80c2-1656e9533f04 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.037680] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1024.037680] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529cfe78-b925-253b-35c5-4885b1e36109" [ 1024.037680] env[68217]: _type = "Task" [ 1024.037680] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.048039] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529cfe78-b925-253b-35c5-4885b1e36109, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.237344] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.989s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.240756] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.154s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1024.242083] env[68217]: INFO nova.compute.claims [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1024.257723] env[68217]: INFO nova.scheduler.client.report [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleted allocations for instance 105e6181-19c4-466b-88a0-cdbca2cac230 [ 1024.278035] env[68217]: DEBUG nova.compute.manager [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1024.302930] env[68217]: DEBUG nova.virt.hardware [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1024.303283] env[68217]: DEBUG nova.virt.hardware [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1024.303370] env[68217]: DEBUG nova.virt.hardware [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1024.303640] env[68217]: DEBUG nova.virt.hardware [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1024.303827] env[68217]: DEBUG nova.virt.hardware [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1024.304145] env[68217]: DEBUG nova.virt.hardware [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1024.304365] env[68217]: DEBUG nova.virt.hardware [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1024.304566] env[68217]: DEBUG nova.virt.hardware [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1024.304954] env[68217]: DEBUG nova.virt.hardware [None 
req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1024.305459] env[68217]: DEBUG nova.virt.hardware [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1024.305813] env[68217]: DEBUG nova.virt.hardware [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1024.307612] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-380ce868-86de-4440-abe0-07b32fb9434d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.320309] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf6603a1-273d-4591-92e1-32441e923e30 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.370921] env[68217]: DEBUG nova.network.neutron [-] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.488498] env[68217]: DEBUG nova.compute.manager [req-3ac46b50-db9e-4aac-b020-6e9dcdef957b req-7e665502-5be4-4f36-9895-ea1ceea44bd8 service nova] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Received event network-vif-deleted-79c6bcfe-54df-47ec-a39b-84e3bba24e55 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1024.489215] env[68217]: DEBUG nova.compute.manager [req-3ac46b50-db9e-4aac-b020-6e9dcdef957b req-7e665502-5be4-4f36-9895-ea1ceea44bd8 service nova] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Received event network-vif-deleted-c02a5df2-09c6-499e-8c8a-5e198ba3ef9e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1024.549078] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529cfe78-b925-253b-35c5-4885b1e36109, 'name': SearchDatastore_Task, 'duration_secs': 0.012732} completed successfully. 
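The nova.virt.hardware entries above (flavor limits 0:0:0, image limits 0:0:0, a 65536:65536:65536 maximum, and a single possible topology of 1:1:1 for the one-vCPU m1.nano flavor) boil down to enumerating sockets*cores*threads factorisations of the vCPU count and keeping those within the per-dimension limits. The sketch below illustrates that enumeration only; it is not Nova's actual _get_possible_cpu_topologies implementation.

from collections import namedtuple

VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    """Enumerate sockets*cores*threads combinations that exactly
    cover `vcpus`, staying within the given maxima."""
    topologies = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        for cores in range(1, min(vcpus // sockets, max_cores) + 1):
            if (vcpus // sockets) % cores:
                continue
            threads = vcpus // (sockets * cores)
            if threads <= max_threads:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
    return topologies

# For the m1.nano flavor above (1 vCPU) this yields exactly one option,
# matching the log: [VirtCPUTopology(sockets=1, cores=1, threads=1)]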
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.549797] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1024.549797] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1024.551230] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.551230] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.551230] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1024.551230] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5e616a64-c38b-491e-b1dc-a571d1ee4937 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.561351] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1024.561672] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Folder [datastore1] devstack-image-cache_base created. 
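The Acquiring/Acquired/Releasing lock "[datastore1] devstack-image-cache_base/..." entries show the per-image serialisation around the datastore image cache: whichever request holds the lock checks whether the cached VMDK already exists and fetches it if not, so concurrent spawns from the same image never download it twice. A minimal sketch of that pattern with oslo.concurrency follows; ensure_cached_image, fetch_from_glance and image_exists_on_datastore are placeholder names for illustration, not Nova's real helpers.

from oslo_concurrency import lockutils

def ensure_cached_image(datastore, image_id, fetch_from_glance,
                        image_exists_on_datastore):
    """Serialise per-image work on the datastore image cache."""
    cache_path = '[%s] devstack-image-cache_base/%s' % (datastore, image_id)
    # One lock per cached image path, as in the log: the holder either
    # finds the cached VMDK or populates it before anyone else looks.
    with lockutils.lock(cache_path):
        if not image_exists_on_datastore(cache_path):
            fetch_from_glance(image_id, cache_path)
        return cache_path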
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1024.562742] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf07f6e2-cd41-4b9d-92da-ead29af673da {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.569525] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1024.569525] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524d603b-c74a-af59-d0d9-137cdc7cff1f" [ 1024.569525] env[68217]: _type = "Task" [ 1024.569525] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.578189] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524d603b-c74a-af59-d0d9-137cdc7cff1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.591984] env[68217]: DEBUG nova.compute.manager [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Stashing vm_state: active {{(pid=68217) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1024.769227] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c20a4a-4429-45a7-9b02-f04978b856a0 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "105e6181-19c4-466b-88a0-cdbca2cac230" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.622s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1024.846852] env[68217]: INFO nova.compute.manager [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Unrescuing [ 1024.847325] env[68217]: DEBUG oslo_concurrency.lockutils [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "refresh_cache-e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.847325] env[68217]: DEBUG oslo_concurrency.lockutils [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired lock "refresh_cache-e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1024.847674] env[68217]: DEBUG nova.network.neutron [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Building network info cache for instance {{(pid=68217) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1024.873375] env[68217]: INFO nova.compute.manager [-] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Took 1.40 seconds to deallocate network for instance. [ 1025.068544] env[68217]: DEBUG nova.network.neutron [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Successfully updated port: 7ca41605-8ab9-4d01-835b-70d47e78fce9 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1025.082818] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524d603b-c74a-af59-d0d9-137cdc7cff1f, 'name': SearchDatastore_Task, 'duration_secs': 0.017828} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.084556] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ebdac79-9497-48ca-829c-1a586a81b102 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.090871] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1025.090871] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a7db10-b9bd-565d-8ebc-cce5a4bdcd8b" [ 1025.090871] env[68217]: _type = "Task" [ 1025.090871] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.101585] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a7db10-b9bd-565d-8ebc-cce5a4bdcd8b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.115676] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1025.380724] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1025.552485] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc0bce2c-307c-4288-81d6-01250698eaad {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.560851] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1cc45c6-71be-41e9-ba0d-3d0da83fd68d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.602943] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1025.603159] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1025.603404] env[68217]: DEBUG nova.network.neutron [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1025.610622] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c4824f8-2905-47ce-884a-c45b524109d1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.620808] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a7db10-b9bd-565d-8ebc-cce5a4bdcd8b, 'name': SearchDatastore_Task, 'duration_secs': 0.012261} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.623319] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1025.623919] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] df4c3a34-2dea-4f82-9ea6-7a9eb1c03179/df4c3a34-2dea-4f82-9ea6-7a9eb1c03179.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1025.624038] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e132928f-3e1a-466a-8b8e-d8f4926d4fb6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.627125] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-012c60dd-8ee0-45fa-a59a-eddb8be4bee2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.643971] env[68217]: DEBUG nova.compute.provider_tree [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1025.646925] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1025.646925] env[68217]: value = "task-2961829" [ 1025.646925] env[68217]: _type = "Task" [ 1025.646925] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.656019] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961829, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.685736] env[68217]: DEBUG oslo_concurrency.lockutils [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "08ba7156-1c6d-4385-939c-bdd575c7fda3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1025.685983] env[68217]: DEBUG oslo_concurrency.lockutils [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "08ba7156-1c6d-4385-939c-bdd575c7fda3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1025.931999] env[68217]: DEBUG nova.network.neutron [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Updating instance_info_cache with network_info: [{"id": "46e4edb9-72c4-4a7d-af91-4b553d829391", "address": "fa:16:3e:bb:64:fc", "network": {"id": "8ad06dc6-c950-487f-b4ea-fe9364f8548d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-897727038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1424003d74424a9e84d15879f2e634e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap46e4edb9-72", "ovs_interfaceid": "46e4edb9-72c4-4a7d-af91-4b553d829391", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1025.969582] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "interface-35c4ab95-fc14-4bd4-a2a5-64f15f070b88-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1025.969854] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-35c4ab95-fc14-4bd4-a2a5-64f15f070b88-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 
1025.970250] env[68217]: DEBUG nova.objects.instance [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lazy-loading 'flavor' on Instance uuid 35c4ab95-fc14-4bd4-a2a5-64f15f070b88 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1026.137350] env[68217]: DEBUG nova.network.neutron [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1026.148702] env[68217]: DEBUG nova.scheduler.client.report [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1026.164430] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961829, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.189127] env[68217]: DEBUG nova.compute.manager [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1026.338653] env[68217]: DEBUG nova.network.neutron [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating instance_info_cache with network_info: [{"id": "7ca41605-8ab9-4d01-835b-70d47e78fce9", "address": "fa:16:3e:26:5b:d2", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ca41605-8a", "ovs_interfaceid": "7ca41605-8ab9-4d01-835b-70d47e78fce9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1026.435637] env[68217]: DEBUG oslo_concurrency.lockutils [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Releasing lock "refresh_cache-e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1026.436762] env[68217]: DEBUG nova.objects.instance [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lazy-loading 'flavor' on Instance uuid e8ed78ff-94dd-42d3-8a4d-8e58dc788e55 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1026.555992] env[68217]: DEBUG nova.objects.instance [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lazy-loading 'pci_requests' on Instance uuid 35c4ab95-fc14-4bd4-a2a5-64f15f070b88 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1026.656845] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.416s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.657465] env[68217]: DEBUG nova.compute.manager [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Start building networks asynchronously for instance. 
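The network_info blobs dumped above are what update_instance_cache_with_nw_info stores for each instance: a list with one dict per VIF, each carrying the MAC, the Neutron network with its subnets and fixed IPs, and the OVS binding details. Below is a small sketch of pulling the commonly needed fields out of one entry; the literal is trimmed from the d28bcf16-b081-4dc8-a975-2acaed222e15 entry in the log.

vif = {
    "id": "7ca41605-8ab9-4d01-835b-70d47e78fce9",
    "address": "fa:16:3e:26:5b:d2",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "gateway": {"address": "192.168.128.1", "version": 4},
            "ips": [{"address": "192.168.128.9", "type": "fixed",
                     "floating_ips": []}],
        }],
    },
    "devname": "tap7ca41605-8a",
    "ovs_interfaceid": "7ca41605-8ab9-4d01-835b-70d47e78fce9",
}

def summarize_vif(vif):
    # Collect every fixed IP across all subnets of the VIF's network.
    fixed_ips = [ip["address"]
                 for subnet in vif["network"]["subnets"]
                 for ip in subnet["ips"]]
    return {
        "mac": vif["address"],
        "fixed_ips": fixed_ips,
        "bridge": vif["network"]["bridge"],
        "devname": vif["devname"],
    }

# summarize_vif(vif) ->
# {'mac': 'fa:16:3e:26:5b:d2', 'fixed_ips': ['192.168.128.9'],
#  'bridge': 'br-int', 'devname': 'tap7ca41605-8a'}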
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1026.662273] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.666s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1026.662273] env[68217]: DEBUG nova.objects.instance [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lazy-loading 'resources' on Instance uuid 34f176e7-f98e-4eda-aee9-45e44d5ffb85 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1026.667870] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961829, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.581036} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.668197] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] df4c3a34-2dea-4f82-9ea6-7a9eb1c03179/df4c3a34-2dea-4f82-9ea6-7a9eb1c03179.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1026.668425] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1026.669356] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-53b3c003-d8f8-4e2b-a55c-78219d83e5e7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.677305] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1026.677305] env[68217]: value = "task-2961830" [ 1026.677305] env[68217]: _type = "Task" [ 1026.677305] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.686723] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961830, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.693583] env[68217]: DEBUG nova.compute.manager [req-ad8c9338-74ba-4d17-9a59-e586cde3e6ff req-c1fe4769-bb84-47e9-b3b8-2247aa0505b5 service nova] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Received event network-vif-plugged-7ca41605-8ab9-4d01-835b-70d47e78fce9 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1026.693634] env[68217]: DEBUG oslo_concurrency.lockutils [req-ad8c9338-74ba-4d17-9a59-e586cde3e6ff req-c1fe4769-bb84-47e9-b3b8-2247aa0505b5 service nova] Acquiring lock "d28bcf16-b081-4dc8-a975-2acaed222e15-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1026.693850] env[68217]: DEBUG oslo_concurrency.lockutils [req-ad8c9338-74ba-4d17-9a59-e586cde3e6ff req-c1fe4769-bb84-47e9-b3b8-2247aa0505b5 service nova] Lock "d28bcf16-b081-4dc8-a975-2acaed222e15-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1026.694224] env[68217]: DEBUG oslo_concurrency.lockutils [req-ad8c9338-74ba-4d17-9a59-e586cde3e6ff req-c1fe4769-bb84-47e9-b3b8-2247aa0505b5 service nova] Lock "d28bcf16-b081-4dc8-a975-2acaed222e15-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1026.694286] env[68217]: DEBUG nova.compute.manager [req-ad8c9338-74ba-4d17-9a59-e586cde3e6ff req-c1fe4769-bb84-47e9-b3b8-2247aa0505b5 service nova] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] No waiting events found dispatching network-vif-plugged-7ca41605-8ab9-4d01-835b-70d47e78fce9 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1026.694837] env[68217]: WARNING nova.compute.manager [req-ad8c9338-74ba-4d17-9a59-e586cde3e6ff req-c1fe4769-bb84-47e9-b3b8-2247aa0505b5 service nova] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Received unexpected event network-vif-plugged-7ca41605-8ab9-4d01-835b-70d47e78fce9 for instance with vm_state building and task_state spawning. [ 1026.694837] env[68217]: DEBUG nova.compute.manager [req-ad8c9338-74ba-4d17-9a59-e586cde3e6ff req-c1fe4769-bb84-47e9-b3b8-2247aa0505b5 service nova] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Received event network-changed-7ca41605-8ab9-4d01-835b-70d47e78fce9 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1026.695039] env[68217]: DEBUG nova.compute.manager [req-ad8c9338-74ba-4d17-9a59-e586cde3e6ff req-c1fe4769-bb84-47e9-b3b8-2247aa0505b5 service nova] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Refreshing instance network info cache due to event network-changed-7ca41605-8ab9-4d01-835b-70d47e78fce9. 
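The req-ad8c9338 entries above show a network-vif-plugged event arriving from Neutron before anything is waiting for it, so the manager logs "No waiting events found" and flags it as unexpected for an instance still in vm_state building. The underlying mechanism is a per-instance table of pending events that the spawning thread registers before triggering the external action and that the event callback pops when the notification lands. A simplified sketch with plain threading primitives is below; Nova's real event machinery differs in detail, and the class and method names here are illustrative.

import threading

class InstanceEvents:
    """Match externally delivered events against registered waiters."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = {}   # (instance_uuid, event_key) -> threading.Event

    def prepare_for_event(self, instance_uuid, event_key):
        # Called by the code path that expects the event (e.g. plugging
        # a VIF) *before* the external action is triggered.
        ev = threading.Event()
        with self._lock:
            self._waiters[(instance_uuid, event_key)] = ev
        return ev

    def pop_event(self, instance_uuid, event_key):
        # Called when the external notification arrives; returning None
        # is the "No waiting events found dispatching ..." case above.
        with self._lock:
            return self._waiters.pop((instance_uuid, event_key), None)

events = InstanceEvents()
uuid = 'd28bcf16-b081-4dc8-a975-2acaed222e15'
key = 'network-vif-plugged-7ca41605-8ab9-4d01-835b-70d47e78fce9'
waiter = events.prepare_for_event(uuid, key)
found = events.pop_event(uuid, key)   # external event delivered
if found is not None:
    found.set()   # wakes whoever is blocked on waiter.wait(timeout)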
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1026.695140] env[68217]: DEBUG oslo_concurrency.lockutils [req-ad8c9338-74ba-4d17-9a59-e586cde3e6ff req-c1fe4769-bb84-47e9-b3b8-2247aa0505b5 service nova] Acquiring lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1026.715924] env[68217]: DEBUG oslo_concurrency.lockutils [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1026.841614] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1026.841972] env[68217]: DEBUG nova.compute.manager [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Instance network_info: |[{"id": "7ca41605-8ab9-4d01-835b-70d47e78fce9", "address": "fa:16:3e:26:5b:d2", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ca41605-8a", "ovs_interfaceid": "7ca41605-8ab9-4d01-835b-70d47e78fce9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1026.842286] env[68217]: DEBUG oslo_concurrency.lockutils [req-ad8c9338-74ba-4d17-9a59-e586cde3e6ff req-c1fe4769-bb84-47e9-b3b8-2247aa0505b5 service nova] Acquired lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1026.842466] env[68217]: DEBUG nova.network.neutron [req-ad8c9338-74ba-4d17-9a59-e586cde3e6ff req-c1fe4769-bb84-47e9-b3b8-2247aa0505b5 service nova] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Refreshing network info cache for port 7ca41605-8ab9-4d01-835b-70d47e78fce9 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1026.843733] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-74028850-4ce6-4d31-9aed-e7acab255731 
tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:5b:d2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f78b07ea-f425-4622-84f4-706a5d8820a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7ca41605-8ab9-4d01-835b-70d47e78fce9', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1026.851553] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Creating folder: Project (46f4c8c2f4764bd1b995396126b6aaf3). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1026.852074] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-adc88aac-62d8-4464-9963-f05a01ee4a98 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.863085] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Created folder: Project (46f4c8c2f4764bd1b995396126b6aaf3) in parent group-v594094. [ 1026.863284] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Creating folder: Instances. Parent ref: group-v594372. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1026.863529] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-367c64b3-5f0c-48b5-ab8f-07ed14e3fca6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.872766] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Created folder: Instances in parent group-v594372. [ 1026.872997] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1026.873217] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1026.873421] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e50ff65c-a289-4b70-847a-d76e15594375 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.894508] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1026.894508] env[68217]: value = "task-2961833" [ 1026.894508] env[68217]: _type = "Task" [ 1026.894508] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.902345] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961833, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.943786] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b04c884-f12d-4735-b3cc-045e38cfa8bc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.968023] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1026.968426] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-50c58f28-7195-4367-a366-2a16186ad6a3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.974998] env[68217]: DEBUG oslo_vmware.api [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1026.974998] env[68217]: value = "task-2961834" [ 1026.974998] env[68217]: _type = "Task" [ 1026.974998] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.983038] env[68217]: DEBUG oslo_vmware.api [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961834, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.059589] env[68217]: DEBUG nova.objects.base [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Object Instance<35c4ab95-fc14-4bd4-a2a5-64f15f070b88> lazy-loaded attributes: flavor,pci_requests {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1027.059801] env[68217]: DEBUG nova.network.neutron [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1027.133287] env[68217]: DEBUG nova.policy [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9172578aec2742bb9aafc58752b926c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef7e30ed571740f3b3ea6b24fc9c6e20', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1027.163446] env[68217]: DEBUG nova.compute.utils [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1027.165754] env[68217]: DEBUG nova.compute.manager [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1027.166084] env[68217]: DEBUG nova.network.neutron [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1027.189703] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961830, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.149393} completed successfully. 
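The nova.policy entries above record non-fatal checks of the network:attach_external_network rule against the requesters' credentials; failing the check simply means the port is created without external-network privileges. A minimal sketch of such a rule check with oslo.policy follows; the "is_admin:True" check string is an illustrative default and the credential dict is trimmed from the log, so this is not necessarily Nova's configured rule.

from oslo_config import cfg
from oslo_policy import policy

enforcer = policy.Enforcer(cfg.CONF)
# Register a default for the rule name seen in the log; the check
# string here is an assumption for illustration.
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'is_admin:True'))

creds = {'project_id': 'ef7e30ed571740f3b3ea6b24fc9c6e20',
         'roles': ['member', 'reader'], 'is_admin': False}
target = {'project_id': creds['project_id']}

# do_raise=False makes a failed check return False instead of raising,
# matching the "Policy check ... failed with credentials" DEBUG lines.
allowed = enforcer.enforce('network:attach_external_network',
                           target, creds, do_raise=False)
print('attach_external_network allowed:', allowed)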
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.190032] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1027.190903] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fdeffe4-429d-4f5f-a9f8-35c756481292 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.229983] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] df4c3a34-2dea-4f82-9ea6-7a9eb1c03179/df4c3a34-2dea-4f82-9ea6-7a9eb1c03179.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1027.232625] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8a1aa958-7514-42b5-9dcd-fea293001fed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.256321] env[68217]: DEBUG nova.policy [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd229078579a54e6991e85bc49326c0b6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3068bf39ee943f1bdf378f8b2a5c360', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1027.267242] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1027.267242] env[68217]: value = "task-2961835" [ 1027.267242] env[68217]: _type = "Task" [ 1027.267242] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.280151] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961835, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.406138] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961833, 'name': CreateVM_Task, 'duration_secs': 0.472114} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.409864] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1027.409864] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.409864] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.409864] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1027.412062] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0fefe4a6-afba-428c-9f65-ec186a614cc7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.421022] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1027.421022] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f03d9d-0848-0587-c5ec-c6629f46610a" [ 1027.421022] env[68217]: _type = "Task" [ 1027.421022] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.428492] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f03d9d-0848-0587-c5ec-c6629f46610a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.485781] env[68217]: DEBUG oslo_vmware.api [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961834, 'name': PowerOffVM_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.514020] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32a393e0-ce71-4366-9db3-52131ef43d71 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.521819] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432af162-1a02-4989-b4a3-0f90d6808956 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.561518] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0ef000b-f42b-49b4-9d1a-56b762fcd7dc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.571045] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71acae9a-7115-4e03-94c5-6e723592dd79 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.585411] env[68217]: DEBUG nova.compute.provider_tree [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1027.671723] env[68217]: DEBUG nova.compute.manager [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1027.781531] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961835, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.929680] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f03d9d-0848-0587-c5ec-c6629f46610a, 'name': SearchDatastore_Task, 'duration_secs': 0.023953} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.929948] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1027.930204] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1027.930537] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1027.930622] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1027.930912] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1027.931143] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8bb2b069-462b-4989-b87a-9f809f08e742 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.940945] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1027.941155] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1027.941910] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc9e1f89-d83e-480c-aced-1054ed3c5ba7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1027.947372] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1027.947372] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52060189-9173-e04b-a7be-2304e951ccab" [ 1027.947372] env[68217]: _type = "Task" [ 1027.947372] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1027.960794] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52060189-9173-e04b-a7be-2304e951ccab, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.971755] env[68217]: DEBUG nova.network.neutron [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Successfully created port: bd03a7d5-905c-4c0d-9972-f46ba5682fd7 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1027.988147] env[68217]: DEBUG oslo_vmware.api [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961834, 'name': PowerOffVM_Task, 'duration_secs': 0.530349} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.988468] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1027.994290] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Reconfiguring VM instance instance-0000005c to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1027.994625] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be4359ae-006f-476d-9abd-ea2ee3bf8254 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.012535] env[68217]: DEBUG nova.network.neutron [req-ad8c9338-74ba-4d17-9a59-e586cde3e6ff req-c1fe4769-bb84-47e9-b3b8-2247aa0505b5 service nova] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updated VIF entry in instance network info cache for port 7ca41605-8ab9-4d01-835b-70d47e78fce9. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1028.012774] env[68217]: DEBUG nova.network.neutron [req-ad8c9338-74ba-4d17-9a59-e586cde3e6ff req-c1fe4769-bb84-47e9-b3b8-2247aa0505b5 service nova] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating instance_info_cache with network_info: [{"id": "7ca41605-8ab9-4d01-835b-70d47e78fce9", "address": "fa:16:3e:26:5b:d2", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ca41605-8a", "ovs_interfaceid": "7ca41605-8ab9-4d01-835b-70d47e78fce9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1028.027265] env[68217]: DEBUG oslo_vmware.api [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1028.027265] env[68217]: value = "task-2961836" [ 1028.027265] env[68217]: _type = "Task" [ 1028.027265] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.038488] env[68217]: DEBUG oslo_vmware.api [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961836, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.089044] env[68217]: DEBUG nova.scheduler.client.report [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1028.209059] env[68217]: DEBUG nova.network.neutron [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Successfully created port: 0ba43f81-704a-45f6-b856-293799e1bccc {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1028.281484] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961835, 'name': ReconfigVM_Task, 'duration_secs': 0.548059} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.281768] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Reconfigured VM instance instance-00000061 to attach disk [datastore1] df4c3a34-2dea-4f82-9ea6-7a9eb1c03179/df4c3a34-2dea-4f82-9ea6-7a9eb1c03179.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1028.282409] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-643fed75-092b-4979-86f1-6693e31ecfd0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.288990] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1028.288990] env[68217]: value = "task-2961837" [ 1028.288990] env[68217]: _type = "Task" [ 1028.288990] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.300444] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961837, 'name': Rename_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.459030] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52060189-9173-e04b-a7be-2304e951ccab, 'name': SearchDatastore_Task, 'duration_secs': 0.012805} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.460846] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e5ff837c-234f-4687-9bb0-0095b1fe8fb4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.467265] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1028.467265] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52364a1a-84f1-821d-cf28-b7c096e64b04" [ 1028.467265] env[68217]: _type = "Task" [ 1028.467265] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.476600] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52364a1a-84f1-821d-cf28-b7c096e64b04, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.519453] env[68217]: DEBUG oslo_concurrency.lockutils [req-ad8c9338-74ba-4d17-9a59-e586cde3e6ff req-c1fe4769-bb84-47e9-b3b8-2247aa0505b5 service nova] Releasing lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.536862] env[68217]: DEBUG oslo_vmware.api [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961836, 'name': ReconfigVM_Task, 'duration_secs': 0.37593} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.537192] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Reconfigured VM instance instance-0000005c to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1028.537391] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1028.537648] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9deb4961-8f1b-480b-a0c3-376719e3a865 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.544079] env[68217]: DEBUG oslo_vmware.api [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1028.544079] env[68217]: value = "task-2961838" [ 1028.544079] env[68217]: _type = "Task" [ 1028.544079] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.552603] env[68217]: DEBUG oslo_vmware.api [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961838, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.595452] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.934s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1028.598255] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 3.482s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1028.635949] env[68217]: INFO nova.scheduler.client.report [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Deleted allocations for instance 34f176e7-f98e-4eda-aee9-45e44d5ffb85 [ 1028.687526] env[68217]: DEBUG nova.compute.manager [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1028.718359] env[68217]: DEBUG nova.virt.hardware [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1028.719084] env[68217]: DEBUG nova.virt.hardware [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1028.719084] env[68217]: DEBUG nova.virt.hardware [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1028.719084] env[68217]: DEBUG nova.virt.hardware [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1028.719227] env[68217]: DEBUG nova.virt.hardware [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1028.719436] env[68217]: DEBUG nova.virt.hardware [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1028.719637] env[68217]: DEBUG nova.virt.hardware [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1028.719844] env[68217]: DEBUG nova.virt.hardware [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1028.720082] env[68217]: DEBUG 
nova.virt.hardware [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1028.720289] env[68217]: DEBUG nova.virt.hardware [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1028.720501] env[68217]: DEBUG nova.virt.hardware [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1028.721449] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7387b67b-d2e1-464c-8a55-9161d759795a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.731224] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ceb08a7-7487-4130-b6da-162c01dc5d6d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.798847] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961837, 'name': Rename_Task, 'duration_secs': 0.174162} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.799071] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1028.799397] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b391bbf7-f507-4c31-a8a1-30aa2a61316f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.806807] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1028.806807] env[68217]: value = "task-2961839" [ 1028.806807] env[68217]: _type = "Task" [ 1028.806807] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.816708] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961839, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.979151] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52364a1a-84f1-821d-cf28-b7c096e64b04, 'name': SearchDatastore_Task, 'duration_secs': 0.010709} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.979151] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1028.979151] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] d28bcf16-b081-4dc8-a975-2acaed222e15/d28bcf16-b081-4dc8-a975-2acaed222e15.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1028.979700] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-13e7b379-513e-45aa-8033-62d745caf7d9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.987183] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1028.987183] env[68217]: value = "task-2961840" [ 1028.987183] env[68217]: _type = "Task" [ 1028.987183] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.996577] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2961840, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.055819] env[68217]: DEBUG oslo_vmware.api [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961838, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.102956] env[68217]: INFO nova.compute.claims [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1029.147347] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cfd57d4-6d24-4c58-adf0-8c2f61ee99b8 tempest-ServersTestMultiNic-1769775128 tempest-ServersTestMultiNic-1769775128-project-member] Lock "34f176e7-f98e-4eda-aee9-45e44d5ffb85" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.479s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1029.321891] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961839, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.499681] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2961840, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1029.556178] env[68217]: DEBUG oslo_vmware.api [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961838, 'name': PowerOnVM_Task, 'duration_secs': 0.523311} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.556459] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1029.556693] env[68217]: DEBUG nova.compute.manager [None req-73bea599-899a-46d8-ac79-6cfeee2647c4 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1029.557604] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3244b0b3-fe27-4baf-bc7c-c8975562de82 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.610512] env[68217]: INFO nova.compute.resource_tracker [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Updating resource usage from migration 70d4a642-d476-4225-89a6-2b2183c2aa27 [ 1029.822932] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961839, 'name': PowerOnVM_Task, 'duration_secs': 0.89664} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.823422] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1029.823674] env[68217]: DEBUG nova.compute.manager [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1029.824595] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f6d81d-1fd5-45f5-94bd-38877a48c3a7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.890493] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcf9172e-6036-42b3-a566-704cd252adf3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.904424] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00323780-8f91-49cb-8fe2-90be7ccb627d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.948745] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3d0a14-db53-48bd-9b7f-133e6c7eccd6 {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.958175] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-deafdd43-efee-485f-b9db-e154b3a354be {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.973043] env[68217]: DEBUG nova.compute.provider_tree [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1029.998905] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2961840, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.619198} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.999191] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] d28bcf16-b081-4dc8-a975-2acaed222e15/d28bcf16-b081-4dc8-a975-2acaed222e15.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1030.000018] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1030.000018] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a2073410-7b30-4ddd-b442-20b84c0a7889 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.006846] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1030.006846] env[68217]: value = "task-2961841" [ 1030.006846] env[68217]: _type = "Task" [ 1030.006846] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.016949] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2961841, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.157229] env[68217]: DEBUG nova.compute.manager [req-2c23c49a-6719-4806-854c-33f45bd3c026 req-7a6ec6e8-f0e6-4bea-add1-72c005caff1e service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Received event network-vif-plugged-bd03a7d5-905c-4c0d-9972-f46ba5682fd7 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1030.157504] env[68217]: DEBUG oslo_concurrency.lockutils [req-2c23c49a-6719-4806-854c-33f45bd3c026 req-7a6ec6e8-f0e6-4bea-add1-72c005caff1e service nova] Acquiring lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.158405] env[68217]: DEBUG oslo_concurrency.lockutils [req-2c23c49a-6719-4806-854c-33f45bd3c026 req-7a6ec6e8-f0e6-4bea-add1-72c005caff1e service nova] Lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.158815] env[68217]: DEBUG oslo_concurrency.lockutils [req-2c23c49a-6719-4806-854c-33f45bd3c026 req-7a6ec6e8-f0e6-4bea-add1-72c005caff1e service nova] Lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.158815] env[68217]: DEBUG nova.compute.manager [req-2c23c49a-6719-4806-854c-33f45bd3c026 req-7a6ec6e8-f0e6-4bea-add1-72c005caff1e service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] No waiting events found dispatching network-vif-plugged-bd03a7d5-905c-4c0d-9972-f46ba5682fd7 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1030.158994] env[68217]: WARNING nova.compute.manager [req-2c23c49a-6719-4806-854c-33f45bd3c026 req-7a6ec6e8-f0e6-4bea-add1-72c005caff1e service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Received unexpected event network-vif-plugged-bd03a7d5-905c-4c0d-9972-f46ba5682fd7 for instance with vm_state active and task_state None. 
[ 1030.204790] env[68217]: DEBUG nova.network.neutron [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Successfully updated port: 0ba43f81-704a-45f6-b856-293799e1bccc {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1030.340267] env[68217]: INFO nova.compute.manager [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] bringing vm to original state: 'stopped' [ 1030.344166] env[68217]: DEBUG nova.compute.manager [req-a60adfa2-ad8b-4554-add0-43cdc5ae64cd req-6334c974-e910-45ce-9f7d-0450e542062b service nova] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Received event network-vif-plugged-0ba43f81-704a-45f6-b856-293799e1bccc {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1030.344403] env[68217]: DEBUG oslo_concurrency.lockutils [req-a60adfa2-ad8b-4554-add0-43cdc5ae64cd req-6334c974-e910-45ce-9f7d-0450e542062b service nova] Acquiring lock "1faf45fb-a3b0-4647-b63d-3f51695b6171-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1030.344604] env[68217]: DEBUG oslo_concurrency.lockutils [req-a60adfa2-ad8b-4554-add0-43cdc5ae64cd req-6334c974-e910-45ce-9f7d-0450e542062b service nova] Lock "1faf45fb-a3b0-4647-b63d-3f51695b6171-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.344767] env[68217]: DEBUG oslo_concurrency.lockutils [req-a60adfa2-ad8b-4554-add0-43cdc5ae64cd req-6334c974-e910-45ce-9f7d-0450e542062b service nova] Lock "1faf45fb-a3b0-4647-b63d-3f51695b6171-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.345348] env[68217]: DEBUG nova.compute.manager [req-a60adfa2-ad8b-4554-add0-43cdc5ae64cd req-6334c974-e910-45ce-9f7d-0450e542062b service nova] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] No waiting events found dispatching network-vif-plugged-0ba43f81-704a-45f6-b856-293799e1bccc {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1030.345348] env[68217]: WARNING nova.compute.manager [req-a60adfa2-ad8b-4554-add0-43cdc5ae64cd req-6334c974-e910-45ce-9f7d-0450e542062b service nova] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Received unexpected event network-vif-plugged-0ba43f81-704a-45f6-b856-293799e1bccc for instance with vm_state building and task_state spawning. 
[ 1030.477728] env[68217]: DEBUG nova.scheduler.client.report [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1030.516732] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2961841, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081476} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.517109] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1030.517811] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-864cd213-4706-4a2e-a316-b64e131c58f6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.540086] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] d28bcf16-b081-4dc8-a975-2acaed222e15/d28bcf16-b081-4dc8-a975-2acaed222e15.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1030.540348] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e55f61dc-a176-4212-9d06-93ac7339c034 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.556649] env[68217]: DEBUG nova.network.neutron [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Successfully updated port: bd03a7d5-905c-4c0d-9972-f46ba5682fd7 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1030.563783] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1030.563783] env[68217]: value = "task-2961842" [ 1030.563783] env[68217]: _type = "Task" [ 1030.563783] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.572686] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2961842, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.712590] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "refresh_cache-1faf45fb-a3b0-4647-b63d-3f51695b6171" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.712590] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquired lock "refresh_cache-1faf45fb-a3b0-4647-b63d-3f51695b6171" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1030.712590] env[68217]: DEBUG nova.network.neutron [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1030.982564] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.385s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1030.982790] env[68217]: INFO nova.compute.manager [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Migrating [ 1030.989837] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.609s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1030.990100] env[68217]: DEBUG nova.objects.instance [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Lazy-loading 'resources' on Instance uuid 33802025-7f72-4ad9-80fe-b15196b1a577 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1031.059437] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.059723] env[68217]: DEBUG oslo_concurrency.lockutils [None 
req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.059820] env[68217]: DEBUG nova.network.neutron [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1031.075098] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2961842, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.282161] env[68217]: DEBUG nova.network.neutron [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1031.350199] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "df4c3a34-2dea-4f82-9ea6-7a9eb1c03179" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1031.350464] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "df4c3a34-2dea-4f82-9ea6-7a9eb1c03179" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1031.350644] env[68217]: DEBUG nova.compute.manager [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1031.352176] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b207815-0fb3-4d47-a48e-cc300a7ac2d2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.362391] env[68217]: DEBUG nova.compute.manager [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68217) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1031.505054] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 
tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "refresh_cache-ffff4cf4-f663-4965-84d1-8351bfde1252" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.505292] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "refresh_cache-ffff4cf4-f663-4965-84d1-8351bfde1252" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1031.505471] env[68217]: DEBUG nova.network.neutron [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1031.516686] env[68217]: DEBUG nova.network.neutron [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Updating instance_info_cache with network_info: [{"id": "0ba43f81-704a-45f6-b856-293799e1bccc", "address": "fa:16:3e:a9:49:77", "network": {"id": "e7f56c12-ca87-40ad-b72d-955989c48237", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-417650994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3068bf39ee943f1bdf378f8b2a5c360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ba43f81-70", "ovs_interfaceid": "0ba43f81-704a-45f6-b856-293799e1bccc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.579095] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2961842, 'name': ReconfigVM_Task, 'duration_secs': 0.524818} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.579095] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Reconfigured VM instance instance-00000064 to attach disk [datastore1] d28bcf16-b081-4dc8-a975-2acaed222e15/d28bcf16-b081-4dc8-a975-2acaed222e15.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1031.579095] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2edfd07d-f58a-42a9-a25a-332d167c770a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.586159] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1031.586159] env[68217]: value = "task-2961843" [ 1031.586159] env[68217]: _type = "Task" [ 1031.586159] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.599367] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2961843, 'name': Rename_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.612355] env[68217]: WARNING nova.network.neutron [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] 246af4c9-69b4-4542-84b9-2afe67cf297a already exists in list: networks containing: ['246af4c9-69b4-4542-84b9-2afe67cf297a']. 
ignoring it [ 1031.816157] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac3fa7d-11f5-4ac8-8aff-8df2a5eaa7c5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.826362] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60df5ebe-372b-45a9-82e2-e52cdff30912 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.858458] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c96e9323-3c5c-40e9-9161-964d01923e92 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.869645] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8bb8fa6-fbfe-41b8-bbd2-1ef37b16242d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.875253] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1031.875253] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-64916db6-6704-4796-a717-a6a0bcf6c5cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.885843] env[68217]: DEBUG nova.compute.provider_tree [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.892023] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1031.892023] env[68217]: value = "task-2961844" [ 1031.892023] env[68217]: _type = "Task" [ 1031.892023] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.897368] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961844, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.025075] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Releasing lock "refresh_cache-1faf45fb-a3b0-4647-b63d-3f51695b6171" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1032.025075] env[68217]: DEBUG nova.compute.manager [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Instance network_info: |[{"id": "0ba43f81-704a-45f6-b856-293799e1bccc", "address": "fa:16:3e:a9:49:77", "network": {"id": "e7f56c12-ca87-40ad-b72d-955989c48237", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-417650994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3068bf39ee943f1bdf378f8b2a5c360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ba43f81-70", "ovs_interfaceid": "0ba43f81-704a-45f6-b856-293799e1bccc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1032.025075] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:49:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0ba43f81-704a-45f6-b856-293799e1bccc', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1032.033056] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1032.033862] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1032.034125] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7567f321-176d-47c0-80de-8e6a222e148e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.061772] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1032.061772] env[68217]: value = "task-2961845" [ 1032.061772] env[68217]: _type = "Task" [ 1032.061772] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.070748] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961845, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.104278] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2961843, 'name': Rename_Task, 'duration_secs': 0.174516} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.104565] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1032.104819] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f4ac00d-91f0-4cb1-b4f9-fb4f9bc7b54f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.112371] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1032.112371] env[68217]: value = "task-2961846" [ 1032.112371] env[68217]: _type = "Task" [ 1032.112371] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.120716] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2961846, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.329374] env[68217]: DEBUG nova.network.neutron [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Updating instance_info_cache with network_info: [{"id": "05c67562-5b0b-421a-a707-1d10d90f4a71", "address": "fa:16:3e:f2:62:b9", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05c67562-5b", "ovs_interfaceid": "05c67562-5b0b-421a-a707-1d10d90f4a71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bd03a7d5-905c-4c0d-9972-f46ba5682fd7", "address": "fa:16:3e:71:68:79", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd03a7d5-90", "ovs_interfaceid": "bd03a7d5-905c-4c0d-9972-f46ba5682fd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.346593] env[68217]: DEBUG oslo_vmware.rw_handles [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c6f688-1191-8ada-0af1-ce794aca4922/disk-0.vmdk. 
{{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1032.347098] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da57e3cd-fef9-487d-a347-025babccaa3c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.353723] env[68217]: DEBUG oslo_vmware.rw_handles [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c6f688-1191-8ada-0af1-ce794aca4922/disk-0.vmdk is in state: ready. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1032.354090] env[68217]: ERROR oslo_vmware.rw_handles [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c6f688-1191-8ada-0af1-ce794aca4922/disk-0.vmdk due to incomplete transfer. [ 1032.354369] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-c5c81a30-e5c4-4c9a-8944-7b2caabee9b1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.364675] env[68217]: DEBUG oslo_vmware.rw_handles [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52c6f688-1191-8ada-0af1-ce794aca4922/disk-0.vmdk. {{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1032.365361] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Uploaded image 6e605951-4bf8-4242-b4cf-a140328995ff to the Glance image server {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1032.367645] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1032.370846] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-4d848ffb-15eb-4248-9ec4-c14a3b233722 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.377606] env[68217]: DEBUG oslo_vmware.api [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1032.377606] env[68217]: value = "task-2961847" [ 1032.377606] env[68217]: _type = "Task" [ 1032.377606] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.385919] env[68217]: DEBUG oslo_vmware.api [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961847, 'name': Destroy_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.390711] env[68217]: DEBUG nova.scheduler.client.report [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1032.402855] env[68217]: DEBUG oslo_vmware.api [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961844, 'name': PowerOffVM_Task, 'duration_secs': 0.222954} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.403737] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1032.403737] env[68217]: DEBUG nova.compute.manager [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1032.404670] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad8a477-f735-44a3-99ad-fe8e594f71c5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.572207] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961845, 'name': CreateVM_Task, 'duration_secs': 0.366172} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.572358] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1032.573116] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.573290] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.573645] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1032.573898] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b3f5444e-d051-45b0-a27d-e6242a603e45 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.578733] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1032.578733] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ea81b3-5cd3-fe9a-c78c-959a45a20938" [ 1032.578733] env[68217]: _type = "Task" [ 1032.578733] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.586785] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ea81b3-5cd3-fe9a-c78c-959a45a20938, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.623071] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2961846, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.790411] env[68217]: DEBUG nova.compute.manager [req-32604cd7-1240-4b41-8dac-d59df4f0d7d9 req-30946e25-193a-4bf9-b884-688f3a218232 service nova] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Received event network-changed-0ba43f81-704a-45f6-b856-293799e1bccc {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1032.790717] env[68217]: DEBUG nova.compute.manager [req-32604cd7-1240-4b41-8dac-d59df4f0d7d9 req-30946e25-193a-4bf9-b884-688f3a218232 service nova] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Refreshing instance network info cache due to event network-changed-0ba43f81-704a-45f6-b856-293799e1bccc. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1032.791755] env[68217]: DEBUG oslo_concurrency.lockutils [req-32604cd7-1240-4b41-8dac-d59df4f0d7d9 req-30946e25-193a-4bf9-b884-688f3a218232 service nova] Acquiring lock "refresh_cache-1faf45fb-a3b0-4647-b63d-3f51695b6171" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.792036] env[68217]: DEBUG oslo_concurrency.lockutils [req-32604cd7-1240-4b41-8dac-d59df4f0d7d9 req-30946e25-193a-4bf9-b884-688f3a218232 service nova] Acquired lock "refresh_cache-1faf45fb-a3b0-4647-b63d-3f51695b6171" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.792301] env[68217]: DEBUG nova.network.neutron [req-32604cd7-1240-4b41-8dac-d59df4f0d7d9 req-30946e25-193a-4bf9-b884-688f3a218232 service nova] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Refreshing network info cache for port 0ba43f81-704a-45f6-b856-293799e1bccc {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1032.808560] env[68217]: DEBUG nova.network.neutron [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Updating instance_info_cache with network_info: [{"id": "a112e6da-1e76-4618-b45e-229cbb5c0ebd", "address": "fa:16:3e:43:19:4b", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa112e6da-1e", "ovs_interfaceid": "a112e6da-1e76-4618-b45e-229cbb5c0ebd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.833466] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37e15b4-7958-4440-8242-906855a36289 
tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1032.834160] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1032.834334] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1032.835177] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74e805d0-2e99-4641-849b-cb63a72e09de {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.855228] env[68217]: DEBUG nova.virt.hardware [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1032.855538] env[68217]: DEBUG nova.virt.hardware [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1032.855618] env[68217]: DEBUG nova.virt.hardware [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1032.855782] env[68217]: DEBUG nova.virt.hardware [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1032.855924] env[68217]: DEBUG nova.virt.hardware [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1032.856089] env[68217]: DEBUG nova.virt.hardware [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 
tempest-AttachInterfacesTestJSON-2077443799-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1032.856495] env[68217]: DEBUG nova.virt.hardware [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1032.856670] env[68217]: DEBUG nova.virt.hardware [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1032.856840] env[68217]: DEBUG nova.virt.hardware [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1032.857073] env[68217]: DEBUG nova.virt.hardware [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1032.857281] env[68217]: DEBUG nova.virt.hardware [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1032.864752] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Reconfiguring VM to attach interface {{(pid=68217) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1032.865914] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-37f129b8-cc02-4659-acc9-37bf753897ab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.888310] env[68217]: DEBUG oslo_vmware.api [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961847, 'name': Destroy_Task, 'duration_secs': 0.311944} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.889642] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Destroyed the VM [ 1032.889887] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1032.890214] env[68217]: DEBUG oslo_vmware.api [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1032.890214] env[68217]: value = "task-2961848" [ 1032.890214] env[68217]: _type = "Task" [ 1032.890214] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.890405] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-d049e128-7137-42de-9f8e-3566f13e32dc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.900470] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.911s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.902721] env[68217]: DEBUG oslo_vmware.api [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961848, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.904270] env[68217]: DEBUG oslo_concurrency.lockutils [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.189s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1032.905718] env[68217]: INFO nova.compute.claims [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1032.908322] env[68217]: DEBUG oslo_vmware.api [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1032.908322] env[68217]: value = "task-2961849" [ 1032.908322] env[68217]: _type = "Task" [ 1032.908322] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.917359] env[68217]: DEBUG oslo_vmware.api [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961849, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.926940] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "df4c3a34-2dea-4f82-9ea6-7a9eb1c03179" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.575s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1032.940032] env[68217]: INFO nova.scheduler.client.report [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Deleted allocations for instance 33802025-7f72-4ad9-80fe-b15196b1a577 [ 1033.090062] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ea81b3-5cd3-fe9a-c78c-959a45a20938, 'name': SearchDatastore_Task, 'duration_secs': 0.011682} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.090366] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.090591] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1033.090777] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.090920] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1033.091109] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 
tempest-AttachVolumeNegativeTest-810049963-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1033.091373] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7bd5486b-bc22-46cd-8edf-a8689668b508 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.099278] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1033.099454] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1033.100156] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-988f351c-9665-43f1-bf2f-159c9be51cbb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.105036] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1033.105036] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5234ee18-e79f-77fd-4370-485be431bf62" [ 1033.105036] env[68217]: _type = "Task" [ 1033.105036] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.112516] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5234ee18-e79f-77fd-4370-485be431bf62, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.120486] env[68217]: DEBUG oslo_vmware.api [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2961846, 'name': PowerOnVM_Task, 'duration_secs': 0.807602} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.120724] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1033.120939] env[68217]: INFO nova.compute.manager [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Took 8.84 seconds to spawn the instance on the hypervisor. 
[ 1033.121133] env[68217]: DEBUG nova.compute.manager [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1033.121830] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8423b18c-5dcf-45d0-99c9-ec84fd5d41c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.312280] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "refresh_cache-ffff4cf4-f663-4965-84d1-8351bfde1252" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1033.402179] env[68217]: DEBUG oslo_vmware.api [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961848, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.422185] env[68217]: DEBUG oslo_vmware.api [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961849, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.433174] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1033.449902] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a869bb1c-8b9e-4f21-aaa7-e0474b400f45 tempest-ServerMetadataNegativeTestJSON-946227196 tempest-ServerMetadataNegativeTestJSON-946227196-project-member] Lock "33802025-7f72-4ad9-80fe-b15196b1a577" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.628s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1033.615366] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5234ee18-e79f-77fd-4370-485be431bf62, 'name': SearchDatastore_Task, 'duration_secs': 0.008035} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.616959] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbeed942-da1c-44b5-b376-bd025da632cc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.624151] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1033.624151] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529740cd-4eae-c4e9-2c13-0274c08bef17" [ 1033.624151] env[68217]: _type = "Task" [ 1033.624151] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.637788] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529740cd-4eae-c4e9-2c13-0274c08bef17, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.638402] env[68217]: INFO nova.compute.manager [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Took 14.98 seconds to build instance. [ 1033.703612] env[68217]: DEBUG nova.network.neutron [req-32604cd7-1240-4b41-8dac-d59df4f0d7d9 req-30946e25-193a-4bf9-b884-688f3a218232 service nova] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Updated VIF entry in instance network info cache for port 0ba43f81-704a-45f6-b856-293799e1bccc. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1033.703612] env[68217]: DEBUG nova.network.neutron [req-32604cd7-1240-4b41-8dac-d59df4f0d7d9 req-30946e25-193a-4bf9-b884-688f3a218232 service nova] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Updating instance_info_cache with network_info: [{"id": "0ba43f81-704a-45f6-b856-293799e1bccc", "address": "fa:16:3e:a9:49:77", "network": {"id": "e7f56c12-ca87-40ad-b72d-955989c48237", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-417650994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3068bf39ee943f1bdf378f8b2a5c360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ba43f81-70", "ovs_interfaceid": "0ba43f81-704a-45f6-b856-293799e1bccc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.904583] env[68217]: DEBUG oslo_vmware.api [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961848, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.923889] env[68217]: DEBUG oslo_vmware.api [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961849, 'name': RemoveSnapshot_Task, 'duration_secs': 0.528395} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.926706] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1033.926706] env[68217]: INFO nova.compute.manager [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Took 15.04 seconds to snapshot the instance on the hypervisor. [ 1034.132599] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529740cd-4eae-c4e9-2c13-0274c08bef17, 'name': SearchDatastore_Task, 'duration_secs': 0.009316} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.132958] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.133852] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 1faf45fb-a3b0-4647-b63d-3f51695b6171/1faf45fb-a3b0-4647-b63d-3f51695b6171.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1034.133852] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f3300c05-3663-44d8-9a9a-150b7952bbb5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.140117] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1034.140117] env[68217]: value = "task-2961850" [ 1034.140117] env[68217]: _type = "Task" [ 1034.140117] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.143438] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74028850-4ce6-4d31-9aed-e7acab255731 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "d28bcf16-b081-4dc8-a975-2acaed222e15" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.496s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1034.150313] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961850, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.184545] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71ab75bd-d1c5-4272-865a-4ca93ee2438c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.193401] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-240e9e14-a20e-4950-b918-7d4536ba1159 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.228861] env[68217]: DEBUG oslo_concurrency.lockutils [req-32604cd7-1240-4b41-8dac-d59df4f0d7d9 req-30946e25-193a-4bf9-b884-688f3a218232 service nova] Releasing lock "refresh_cache-1faf45fb-a3b0-4647-b63d-3f51695b6171" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1034.229143] env[68217]: DEBUG nova.compute.manager [req-32604cd7-1240-4b41-8dac-d59df4f0d7d9 req-30946e25-193a-4bf9-b884-688f3a218232 service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Received event network-changed-bd03a7d5-905c-4c0d-9972-f46ba5682fd7 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1034.229430] env[68217]: DEBUG nova.compute.manager [req-32604cd7-1240-4b41-8dac-d59df4f0d7d9 req-30946e25-193a-4bf9-b884-688f3a218232 service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Refreshing instance network info cache due to event network-changed-bd03a7d5-905c-4c0d-9972-f46ba5682fd7. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1034.229696] env[68217]: DEBUG oslo_concurrency.lockutils [req-32604cd7-1240-4b41-8dac-d59df4f0d7d9 req-30946e25-193a-4bf9-b884-688f3a218232 service nova] Acquiring lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.229883] env[68217]: DEBUG oslo_concurrency.lockutils [req-32604cd7-1240-4b41-8dac-d59df4f0d7d9 req-30946e25-193a-4bf9-b884-688f3a218232 service nova] Acquired lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1034.230099] env[68217]: DEBUG nova.network.neutron [req-32604cd7-1240-4b41-8dac-d59df4f0d7d9 req-30946e25-193a-4bf9-b884-688f3a218232 service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Refreshing network info cache for port bd03a7d5-905c-4c0d-9972-f46ba5682fd7 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1034.232564] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1161be5a-783c-43cd-a834-24f79a23c76d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.241471] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12f12043-2be4-460e-a761-7f98d059ac42 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.256660] env[68217]: DEBUG nova.compute.provider_tree [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed in ProviderTree for provider: 
42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1034.322090] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "6b4dff91-254e-43cc-85cf-7de6214dcafd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1034.322337] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "6b4dff91-254e-43cc-85cf-7de6214dcafd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1034.408103] env[68217]: DEBUG oslo_vmware.api [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961848, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.478047] env[68217]: DEBUG nova.compute.manager [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Found 3 images (rotation: 2) {{(pid=68217) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5017}} [ 1034.478271] env[68217]: DEBUG nova.compute.manager [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Rotating out 1 backups {{(pid=68217) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5025}} [ 1034.478435] env[68217]: DEBUG nova.compute.manager [None req-28504f02-f80f-463e-9efc-f02bc824b1e4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Deleting image e2ae22af-290f-4e09-86ea-4b5f1f275371 {{(pid=68217) _rotate_backups /opt/stack/nova/nova/compute/manager.py:5030}} [ 1034.655058] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961850, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.476697} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.655058] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 1faf45fb-a3b0-4647-b63d-3f51695b6171/1faf45fb-a3b0-4647-b63d-3f51695b6171.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1034.655058] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1034.655058] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a1756fa5-ce22-45be-9604-1776ae62888c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.661675] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1034.661675] env[68217]: value = "task-2961851" [ 1034.661675] env[68217]: _type = "Task" [ 1034.661675] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.671223] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961851, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.759558] env[68217]: DEBUG nova.scheduler.client.report [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1034.829367] env[68217]: DEBUG nova.compute.manager [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1034.835570] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dd64d54-ff90-48b0-8bf0-6794b2a703b1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.862093] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Updating instance 'ffff4cf4-f663-4965-84d1-8351bfde1252' progress to 0 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1034.913092] env[68217]: DEBUG oslo_vmware.api [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961848, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.158022] env[68217]: DEBUG nova.network.neutron [req-32604cd7-1240-4b41-8dac-d59df4f0d7d9 req-30946e25-193a-4bf9-b884-688f3a218232 service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Updated VIF entry in instance network info cache for port bd03a7d5-905c-4c0d-9972-f46ba5682fd7. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1035.158022] env[68217]: DEBUG nova.network.neutron [req-32604cd7-1240-4b41-8dac-d59df4f0d7d9 req-30946e25-193a-4bf9-b884-688f3a218232 service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Updating instance_info_cache with network_info: [{"id": "05c67562-5b0b-421a-a707-1d10d90f4a71", "address": "fa:16:3e:f2:62:b9", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05c67562-5b", "ovs_interfaceid": "05c67562-5b0b-421a-a707-1d10d90f4a71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bd03a7d5-905c-4c0d-9972-f46ba5682fd7", "address": "fa:16:3e:71:68:79", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, 
"dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd03a7d5-90", "ovs_interfaceid": "bd03a7d5-905c-4c0d-9972-f46ba5682fd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.180242] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961851, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.261099] env[68217]: DEBUG nova.compute.manager [req-3678e6a3-6237-4885-b0e5-b5541e01d3d4 req-f9d2a61e-6c88-4d59-a4b9-285fec987394 service nova] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Received event network-changed-7ca41605-8ab9-4d01-835b-70d47e78fce9 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1035.261099] env[68217]: DEBUG nova.compute.manager [req-3678e6a3-6237-4885-b0e5-b5541e01d3d4 req-f9d2a61e-6c88-4d59-a4b9-285fec987394 service nova] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Refreshing instance network info cache due to event network-changed-7ca41605-8ab9-4d01-835b-70d47e78fce9. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1035.261099] env[68217]: DEBUG oslo_concurrency.lockutils [req-3678e6a3-6237-4885-b0e5-b5541e01d3d4 req-f9d2a61e-6c88-4d59-a4b9-285fec987394 service nova] Acquiring lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.261099] env[68217]: DEBUG oslo_concurrency.lockutils [req-3678e6a3-6237-4885-b0e5-b5541e01d3d4 req-f9d2a61e-6c88-4d59-a4b9-285fec987394 service nova] Acquired lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1035.261099] env[68217]: DEBUG nova.network.neutron [req-3678e6a3-6237-4885-b0e5-b5541e01d3d4 req-f9d2a61e-6c88-4d59-a4b9-285fec987394 service nova] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Refreshing network info cache for port 7ca41605-8ab9-4d01-835b-70d47e78fce9 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1035.270325] env[68217]: DEBUG oslo_concurrency.lockutils [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.366s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.270934] env[68217]: DEBUG nova.compute.manager [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1035.275267] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 1.842s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.275640] env[68217]: DEBUG nova.objects.instance [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68217) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1035.361483] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.371023] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1035.371023] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-629b23bb-ef26-4126-95fc-2f6ddb5a59cc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.378407] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 1035.378407] env[68217]: value = "task-2961852" [ 1035.378407] env[68217]: _type = "Task" [ 1035.378407] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.388024] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961852, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.410236] env[68217]: DEBUG oslo_vmware.api [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961848, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.539144] env[68217]: DEBUG oslo_concurrency.lockutils [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "df4c3a34-2dea-4f82-9ea6-7a9eb1c03179" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.539144] env[68217]: DEBUG oslo_concurrency.lockutils [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "df4c3a34-2dea-4f82-9ea6-7a9eb1c03179" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.539144] env[68217]: DEBUG oslo_concurrency.lockutils [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "df4c3a34-2dea-4f82-9ea6-7a9eb1c03179-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1035.539144] env[68217]: DEBUG oslo_concurrency.lockutils [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "df4c3a34-2dea-4f82-9ea6-7a9eb1c03179-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1035.539510] env[68217]: DEBUG oslo_concurrency.lockutils [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "df4c3a34-2dea-4f82-9ea6-7a9eb1c03179-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1035.541533] env[68217]: INFO nova.compute.manager [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Terminating instance [ 1035.660671] env[68217]: DEBUG oslo_concurrency.lockutils [req-32604cd7-1240-4b41-8dac-d59df4f0d7d9 req-30946e25-193a-4bf9-b884-688f3a218232 service nova] Releasing lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1035.672923] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961851, 'name': ExtendVirtualDisk_Task} progress is 50%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.784456] env[68217]: DEBUG nova.compute.utils [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1035.790895] env[68217]: DEBUG nova.compute.manager [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1035.791078] env[68217]: DEBUG nova.network.neutron [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1035.865322] env[68217]: DEBUG nova.policy [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fd08981ea724019826d597a1c8b4ecd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d6233e9874c41329f81c990f8bc72b1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1035.888736] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961852, 'name': PowerOffVM_Task, 'duration_secs': 0.369315} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1035.889689] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1035.889689] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Updating instance 'ffff4cf4-f663-4965-84d1-8351bfde1252' progress to 17 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1035.908163] env[68217]: DEBUG oslo_vmware.api [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961848, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.045218] env[68217]: DEBUG nova.compute.manager [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1036.045448] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1036.046771] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ab07141-b90d-40a4-b390-bef89d6b9af3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.055836] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1036.056126] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8b38485f-d4ed-46a3-a51f-45bdb9026aa1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.163436] env[68217]: DEBUG nova.network.neutron [req-3678e6a3-6237-4885-b0e5-b5541e01d3d4 req-f9d2a61e-6c88-4d59-a4b9-285fec987394 service nova] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updated VIF entry in instance network info cache for port 7ca41605-8ab9-4d01-835b-70d47e78fce9. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1036.164255] env[68217]: DEBUG nova.network.neutron [req-3678e6a3-6237-4885-b0e5-b5541e01d3d4 req-f9d2a61e-6c88-4d59-a4b9-285fec987394 service nova] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating instance_info_cache with network_info: [{"id": "7ca41605-8ab9-4d01-835b-70d47e78fce9", "address": "fa:16:3e:26:5b:d2", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ca41605-8a", "ovs_interfaceid": "7ca41605-8ab9-4d01-835b-70d47e78fce9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.174706] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961851, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.023429} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.175014] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1036.177682] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19c0291-0db7-4d64-baac-3cde5b49ceb5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.202347] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 1faf45fb-a3b0-4647-b63d-3f51695b6171/1faf45fb-a3b0-4647-b63d-3f51695b6171.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1036.202909] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6a68cf9-6262-4db7-be93-6f5bb460de0b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.225253] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1036.225253] env[68217]: value = "task-2961854" [ 1036.225253] env[68217]: _type = "Task" [ 1036.225253] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.972774] env[68217]: DEBUG nova.network.neutron [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Successfully created port: a5a64da9-f003-45ab-9b90-62488204fc29 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1036.974925] env[68217]: DEBUG nova.compute.manager [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1036.977931] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9d9a559a-657f-4024-9bc5-74d95375bad0 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.703s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1036.980196] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1036.980410] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1036.980562] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1036.980736] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1036.980876] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1036.981027] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1036.981225] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1036.981383] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 
tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1036.981542] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1036.981712] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1036.981874] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1036.986737] env[68217]: DEBUG oslo_concurrency.lockutils [req-3678e6a3-6237-4885-b0e5-b5541e01d3d4 req-f9d2a61e-6c88-4d59-a4b9-285fec987394 service nova] Releasing lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1036.988748] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1036.988832] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1036.989179] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleting the datastore file [datastore1] df4c3a34-2dea-4f82-9ea6-7a9eb1c03179 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1036.989833] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.629s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1036.992268] env[68217]: INFO nova.compute.claims [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1036.993301] env[68217]: DEBUG oslo_vmware.service [-] Invoking 
VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cd8cf4cf-343b-46d1-a06e-161ae81dbf26 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.006305] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0a1df0f8-6b4c-4329-a884-7a31247894be {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.011539] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961854, 'name': ReconfigVM_Task, 'duration_secs': 0.374856} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.012530] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 1faf45fb-a3b0-4647-b63d-3f51695b6171/1faf45fb-a3b0-4647-b63d-3f51695b6171.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1037.013133] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c72fd713-c1dd-4b51-b585-1be37cd4e8af {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.018370] env[68217]: DEBUG oslo_vmware.api [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961848, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.020957] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 1037.020957] env[68217]: value = "task-2961855" [ 1037.020957] env[68217]: _type = "Task" [ 1037.020957] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.021464] env[68217]: DEBUG oslo_vmware.api [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1037.021464] env[68217]: value = "task-2961856" [ 1037.021464] env[68217]: _type = "Task" [ 1037.021464] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.029238] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1037.029238] env[68217]: value = "task-2961857" [ 1037.029238] env[68217]: _type = "Task" [ 1037.029238] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.042623] env[68217]: DEBUG oslo_vmware.api [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961856, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.042808] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961855, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.048096] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961857, 'name': Rename_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.495311] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2aaade35-3f2a-41b2-ba45-d6cae6481fb1 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "04149a5c-d1b5-4d71-a1ca-44696506a40d" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1037.495642] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2aaade35-3f2a-41b2-ba45-d6cae6481fb1 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "04149a5c-d1b5-4d71-a1ca-44696506a40d" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1037.495753] env[68217]: DEBUG nova.compute.manager [None req-2aaade35-3f2a-41b2-ba45-d6cae6481fb1 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1037.496783] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716d0eca-503f-462c-bba5-7ac44bf51121 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.503815] env[68217]: DEBUG nova.compute.manager [None req-2aaade35-3f2a-41b2-ba45-d6cae6481fb1 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68217) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1037.504554] env[68217]: DEBUG nova.objects.instance [None req-2aaade35-3f2a-41b2-ba45-d6cae6481fb1 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lazy-loading 'flavor' on Instance uuid 04149a5c-d1b5-4d71-a1ca-44696506a40d {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1037.508941] 
env[68217]: DEBUG oslo_vmware.api [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961848, 'name': ReconfigVM_Task, 'duration_secs': 4.408597} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.509100] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1037.509307] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Reconfigured VM to attach interface {{(pid=68217) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1037.537975] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961855, 'name': ReconfigVM_Task, 'duration_secs': 0.161612} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.541013] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Updating instance 'ffff4cf4-f663-4965-84d1-8351bfde1252' progress to 33 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1037.544216] env[68217]: DEBUG oslo_vmware.api [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961856, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.148917} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.544898] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.545115] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1037.545329] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1037.545499] env[68217]: INFO nova.compute.manager [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Took 1.50 seconds to destroy the instance on the hypervisor. [ 1037.545730] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1037.545910] env[68217]: DEBUG nova.compute.manager [-] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1037.546038] env[68217]: DEBUG nova.network.neutron [-] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1037.550367] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961857, 'name': Rename_Task, 'duration_secs': 0.153479} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.550861] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1037.551098] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-48f34008-cc1f-4d5b-8eac-471a4c1c2d6d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.557048] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1037.557048] env[68217]: value = "task-2961858" [ 1037.557048] env[68217]: _type = "Task" [ 1037.557048] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.564267] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961858, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.806781] env[68217]: DEBUG nova.compute.manager [req-9e96d17c-68cb-41b8-8718-ec9cbc5f0881 req-8808fe52-9150-425f-8447-9305f86aba09 service nova] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Received event network-vif-deleted-b5d29b01-b9b6-4d25-9fef-4a335cf05875 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1037.807015] env[68217]: INFO nova.compute.manager [req-9e96d17c-68cb-41b8-8718-ec9cbc5f0881 req-8808fe52-9150-425f-8447-9305f86aba09 service nova] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Neutron deleted interface b5d29b01-b9b6-4d25-9fef-4a335cf05875; detaching it from the instance and deleting it from the info cache [ 1037.807309] env[68217]: DEBUG nova.network.neutron [req-9e96d17c-68cb-41b8-8718-ec9cbc5f0881 req-8808fe52-9150-425f-8447-9305f86aba09 service nova] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.998421] env[68217]: DEBUG nova.compute.manager [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1038.016034] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37e15b4-7958-4440-8242-906855a36289 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-35c4ab95-fc14-4bd4-a2a5-64f15f070b88-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 12.046s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.030927] env[68217]: DEBUG nova.virt.hardware [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1038.031219] env[68217]: DEBUG nova.virt.hardware [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1038.031525] env[68217]: DEBUG nova.virt.hardware [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1038.031584] env[68217]: DEBUG nova.virt.hardware [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1038.032029] env[68217]: DEBUG nova.virt.hardware [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1038.032029] env[68217]: DEBUG nova.virt.hardware [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1038.032130] env[68217]: DEBUG nova.virt.hardware [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1038.032542] env[68217]: DEBUG nova.virt.hardware [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1038.032542] env[68217]: DEBUG nova.virt.hardware [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1038.032639] env[68217]: DEBUG nova.virt.hardware [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1038.032800] env[68217]: DEBUG nova.virt.hardware [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1038.035049] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4124cee8-a908-4675-9ecc-8a7d7df9e5c6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.044303] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57bd1689-801c-4924-8366-3e8fb8c9d4bb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.052886] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1038.053127] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1038.053289] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1038.053468] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] 
Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1038.053650] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1038.053745] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1038.053940] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1038.054113] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1038.054280] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1038.054437] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1038.054607] env[68217]: DEBUG nova.virt.hardware [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1038.059905] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Reconfiguring VM instance instance-00000063 to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1038.060924] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fe4cd707-7a23-415b-8423-db29ca1b80b8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.093632] env[68217]: DEBUG oslo_vmware.api [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961858, 'name': PowerOnVM_Task, 'duration_secs': 0.475601} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.095602] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1038.095602] env[68217]: INFO nova.compute.manager [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Took 9.41 seconds to spawn the instance on the hypervisor. [ 1038.095602] env[68217]: DEBUG nova.compute.manager [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1038.095602] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 1038.095602] env[68217]: value = "task-2961859" [ 1038.095602] env[68217]: _type = "Task" [ 1038.095602] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.096696] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15f17a53-3c10-4240-81af-f3187a0caa8c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.109213] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961859, 'name': ReconfigVM_Task} progress is 14%. 
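
[editor's note] The "Waiting for the task: (returnval){ value = "task-..." }", "progress is 14%." and "completed successfully." sequence is oslo.vmware's task-polling pattern. Below is a minimal stand-alone sketch of the same idea; get_task_info is a hypothetical callable standing in for the real vSphere TaskInfo lookup, and the dict shape is assumed for illustration.

import time

def wait_for_task(get_task_info, interval=0.5, timeout=300.0):
    # Poll until the task reports success, raising on error or timeout.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()              # e.g. {'state': 'running', 'progress': 14}
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError(info.get('message', 'task failed'))
        time.sleep(interval)                # still queued/running; poll again
    raise TimeoutError('task did not complete within %.0fs' % timeout)
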
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.276014] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46354c94-e55a-432f-99aa-afc0b1d1e45c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.283535] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2405ec38-c72f-43fd-93d2-361db51323f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.286738] env[68217]: DEBUG nova.network.neutron [-] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.317438] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3034a5c9-7d5d-4098-9973-3ea0bcde3d3b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.319922] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64cbec30-3e2a-4ab1-b8f5-a9d58b568c4f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.329197] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339551ce-fb40-4983-a2a7-10b1843ab40d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.335878] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c200f3-98f4-486a-af3b-3c07cb8a9f65 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.355887] env[68217]: DEBUG nova.compute.provider_tree [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1038.368145] env[68217]: DEBUG nova.compute.manager [req-9e96d17c-68cb-41b8-8718-ec9cbc5f0881 req-8808fe52-9150-425f-8447-9305f86aba09 service nova] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Detach interface failed, port_id=b5d29b01-b9b6-4d25-9fef-4a335cf05875, reason: Instance df4c3a34-2dea-4f82-9ea6-7a9eb1c03179 could not be found. 
{{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1038.369216] env[68217]: DEBUG nova.scheduler.client.report [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1038.514328] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aaade35-3f2a-41b2-ba45-d6cae6481fb1 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1038.514626] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-67c51f21-e327-4b56-8324-21e5e909e546 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.522402] env[68217]: DEBUG oslo_vmware.api [None req-2aaade35-3f2a-41b2-ba45-d6cae6481fb1 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1038.522402] env[68217]: value = "task-2961860" [ 1038.522402] env[68217]: _type = "Task" [ 1038.522402] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.531035] env[68217]: DEBUG oslo_vmware.api [None req-2aaade35-3f2a-41b2-ba45-d6cae6481fb1 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961860, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.609952] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961859, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.610873] env[68217]: DEBUG nova.network.neutron [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Successfully updated port: a5a64da9-f003-45ab-9b90-62488204fc29 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1038.627331] env[68217]: INFO nova.compute.manager [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Took 15.57 seconds to build instance. [ 1038.790684] env[68217]: INFO nova.compute.manager [-] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Took 1.24 seconds to deallocate network for instance. 
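
[editor's note] The inventory payload reported for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 above carries total, reserved and allocation_ratio per resource class. As a back-of-the-envelope check (simplified; min_unit/max_unit/step_size are ignored), the usual placement capacity rule (total - reserved) * allocation_ratio gives:

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: {capacity:g} schedulable units")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400
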
[ 1038.873753] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.884s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1038.874293] env[68217]: DEBUG nova.compute.manager [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1038.902460] env[68217]: DEBUG nova.compute.manager [req-7a717a49-9add-4e3a-88ad-4466ec34abe3 req-45353444-d974-4d2f-83cd-f0b415b36504 service nova] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Received event network-changed-0ba43f81-704a-45f6-b856-293799e1bccc {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1038.902590] env[68217]: DEBUG nova.compute.manager [req-7a717a49-9add-4e3a-88ad-4466ec34abe3 req-45353444-d974-4d2f-83cd-f0b415b36504 service nova] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Refreshing instance network info cache due to event network-changed-0ba43f81-704a-45f6-b856-293799e1bccc. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1038.902838] env[68217]: DEBUG oslo_concurrency.lockutils [req-7a717a49-9add-4e3a-88ad-4466ec34abe3 req-45353444-d974-4d2f-83cd-f0b415b36504 service nova] Acquiring lock "refresh_cache-1faf45fb-a3b0-4647-b63d-3f51695b6171" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.903035] env[68217]: DEBUG oslo_concurrency.lockutils [req-7a717a49-9add-4e3a-88ad-4466ec34abe3 req-45353444-d974-4d2f-83cd-f0b415b36504 service nova] Acquired lock "refresh_cache-1faf45fb-a3b0-4647-b63d-3f51695b6171" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1038.903982] env[68217]: DEBUG nova.network.neutron [req-7a717a49-9add-4e3a-88ad-4466ec34abe3 req-45353444-d974-4d2f-83cd-f0b415b36504 service nova] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Refreshing network info cache for port 0ba43f81-704a-45f6-b856-293799e1bccc {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1039.033240] env[68217]: DEBUG oslo_vmware.api [None req-2aaade35-3f2a-41b2-ba45-d6cae6481fb1 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961860, 'name': PowerOffVM_Task, 'duration_secs': 0.216625} completed successfully. 
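
[editor's note] The Acquiring/Acquired/Released lock lines (e.g. "compute_resources" held 1.884s, the refresh_cache-<uuid> locks) come from oslo.concurrency's lock helpers. A minimal sketch of the same serialization pattern, assuming oslo.concurrency is installed; the lock name and the body are illustrative, not Nova's resource-tracker code.

from oslo_concurrency import lockutils

def instance_claim(instance_uuid):
    # Everything inside the block runs with the named lock held, which is what
    # produces the "acquired ... waited"/"released ... held" DEBUG lines above.
    with lockutils.lock('compute_resources'):
        print('claiming resources for %s' % instance_uuid)

instance_claim('6b4dff91-254e-43cc-85cf-7de6214dcafd')
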
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.033505] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-2aaade35-3f2a-41b2-ba45-d6cae6481fb1 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1039.033696] env[68217]: DEBUG nova.compute.manager [None req-2aaade35-3f2a-41b2-ba45-d6cae6481fb1 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1039.034458] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8534c07-6f63-4b3b-8583-1a5db4616b27 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.110013] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961859, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.113668] env[68217]: DEBUG oslo_concurrency.lockutils [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "refresh_cache-08ba7156-1c6d-4385-939c-bdd575c7fda3" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1039.113803] env[68217]: DEBUG oslo_concurrency.lockutils [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "refresh_cache-08ba7156-1c6d-4385-939c-bdd575c7fda3" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1039.113947] env[68217]: DEBUG nova.network.neutron [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1039.129671] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a862b74b-b013-4d49-816c-4ba29b10526b tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "1faf45fb-a3b0-4647-b63d-3f51695b6171" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.077s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.299288] env[68217]: DEBUG oslo_concurrency.lockutils [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.299666] env[68217]: DEBUG oslo_concurrency.lockutils [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 
tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.299909] env[68217]: DEBUG nova.objects.instance [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lazy-loading 'resources' on Instance uuid df4c3a34-2dea-4f82-9ea6-7a9eb1c03179 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1039.379043] env[68217]: DEBUG nova.compute.utils [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1039.380483] env[68217]: DEBUG nova.compute.manager [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1039.380644] env[68217]: DEBUG nova.network.neutron [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1039.460941] env[68217]: DEBUG nova.policy [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '03904e82faab40849ad7b7818bf2a121', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1424003d74424a9e84d15879f2e634e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1039.548862] env[68217]: DEBUG oslo_concurrency.lockutils [None req-2aaade35-3f2a-41b2-ba45-d6cae6481fb1 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "04149a5c-d1b5-4d71-a1ca-44696506a40d" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.053s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1039.618122] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961859, 'name': ReconfigVM_Task, 'duration_secs': 1.419323} completed successfully. 
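
[editor's note] The "Policy check for network:attach_external_network failed with credentials {...}" line is an oslo.policy authorization that returned False for a member/reader token. A rough sketch of how such a check is expressed with oslo.policy, assuming oslo.policy and oslo.config are available; the role:admin rule string and empty target are assumptions for illustration, not Nova's real policy defaults.

from oslo_config import cfg
from oslo_policy import policy

conf = cfg.ConfigOpts()
conf([])  # parse empty CLI args so option access works
enforcer = policy.Enforcer(conf)
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'role:admin'))

creds = {'roles': ['member', 'reader'],
         'project_id': '1424003d74424a9e84d15879f2e634e3'}
# do_raise=False returns a boolean instead of raising, mirroring the
# DEBUG-level "failed" message rather than an exception in the log.
allowed = enforcer.enforce('network:attach_external_network', {}, creds, do_raise=False)
print(allowed)   # False for a plain member/reader token under the assumed rule
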
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.618586] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Reconfigured VM instance instance-00000063 to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1039.620995] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bbf8d3c-b0a4-47c5-9908-8ba0b7cb9fd4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.648339] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Reconfiguring VM instance instance-00000063 to attach disk [datastore1] ffff4cf4-f663-4965-84d1-8351bfde1252/ffff4cf4-f663-4965-84d1-8351bfde1252.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1039.648642] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-34be1509-a991-45b3-9801-cc88b0334377 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.670333] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 1039.670333] env[68217]: value = "task-2961861" [ 1039.670333] env[68217]: _type = "Task" [ 1039.670333] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.679043] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961861, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.679792] env[68217]: DEBUG nova.network.neutron [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1039.835372] env[68217]: DEBUG oslo_concurrency.lockutils [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "d14026b1-84dd-430e-be94-94dcb1f47473" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1039.835372] env[68217]: DEBUG oslo_concurrency.lockutils [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "d14026b1-84dd-430e-be94-94dcb1f47473" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1039.888739] env[68217]: DEBUG nova.compute.manager [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1040.056328] env[68217]: DEBUG oslo_concurrency.lockutils [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "interface-35c4ab95-fc14-4bd4-a2a5-64f15f070b88-ff379aaa-198c-405e-ae1f-591e6f4cc070" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.056592] env[68217]: DEBUG oslo_concurrency.lockutils [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-35c4ab95-fc14-4bd4-a2a5-64f15f070b88-ff379aaa-198c-405e-ae1f-591e6f4cc070" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.056931] env[68217]: DEBUG nova.objects.instance [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lazy-loading 'flavor' on Instance uuid 35c4ab95-fc14-4bd4-a2a5-64f15f070b88 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1040.070675] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c67bd33-f5c3-4faf-ad68-d4ed8c2fa7f0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.079461] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95704c04-f299-46b9-9197-e643bcb3207d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.110549] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08dbd2a1-9b71-4b1a-aa2c-3233240c6380 {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.117783] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1afd145-06f6-48dd-8e84-3322f9977f83 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.133773] env[68217]: DEBUG nova.compute.provider_tree [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1040.181036] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961861, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.229708] env[68217]: DEBUG nova.network.neutron [req-7a717a49-9add-4e3a-88ad-4466ec34abe3 req-45353444-d974-4d2f-83cd-f0b415b36504 service nova] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Updated VIF entry in instance network info cache for port 0ba43f81-704a-45f6-b856-293799e1bccc. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1040.230068] env[68217]: DEBUG nova.network.neutron [req-7a717a49-9add-4e3a-88ad-4466ec34abe3 req-45353444-d974-4d2f-83cd-f0b415b36504 service nova] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Updating instance_info_cache with network_info: [{"id": "0ba43f81-704a-45f6-b856-293799e1bccc", "address": "fa:16:3e:a9:49:77", "network": {"id": "e7f56c12-ca87-40ad-b72d-955989c48237", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-417650994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.178", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3068bf39ee943f1bdf378f8b2a5c360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0ba43f81-70", "ovs_interfaceid": "0ba43f81-704a-45f6-b856-293799e1bccc", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.277098] env[68217]: DEBUG nova.compute.manager [req-a5e42160-6677-4d09-a2e7-f3ffc260790a req-da30be2c-8782-4826-8702-439d789709ed service nova] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Received event network-vif-plugged-a5a64da9-f003-45ab-9b90-62488204fc29 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1040.277333] env[68217]: DEBUG oslo_concurrency.lockutils [req-a5e42160-6677-4d09-a2e7-f3ffc260790a 
req-da30be2c-8782-4826-8702-439d789709ed service nova] Acquiring lock "08ba7156-1c6d-4385-939c-bdd575c7fda3-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1040.277982] env[68217]: DEBUG oslo_concurrency.lockutils [req-a5e42160-6677-4d09-a2e7-f3ffc260790a req-da30be2c-8782-4826-8702-439d789709ed service nova] Lock "08ba7156-1c6d-4385-939c-bdd575c7fda3-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1040.278205] env[68217]: DEBUG oslo_concurrency.lockutils [req-a5e42160-6677-4d09-a2e7-f3ffc260790a req-da30be2c-8782-4826-8702-439d789709ed service nova] Lock "08ba7156-1c6d-4385-939c-bdd575c7fda3-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1040.278381] env[68217]: DEBUG nova.compute.manager [req-a5e42160-6677-4d09-a2e7-f3ffc260790a req-da30be2c-8782-4826-8702-439d789709ed service nova] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] No waiting events found dispatching network-vif-plugged-a5a64da9-f003-45ab-9b90-62488204fc29 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1040.278547] env[68217]: WARNING nova.compute.manager [req-a5e42160-6677-4d09-a2e7-f3ffc260790a req-da30be2c-8782-4826-8702-439d789709ed service nova] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Received unexpected event network-vif-plugged-a5a64da9-f003-45ab-9b90-62488204fc29 for instance with vm_state building and task_state spawning. [ 1040.278706] env[68217]: DEBUG nova.compute.manager [req-a5e42160-6677-4d09-a2e7-f3ffc260790a req-da30be2c-8782-4826-8702-439d789709ed service nova] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Received event network-changed-a5a64da9-f003-45ab-9b90-62488204fc29 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1040.278857] env[68217]: DEBUG nova.compute.manager [req-a5e42160-6677-4d09-a2e7-f3ffc260790a req-da30be2c-8782-4826-8702-439d789709ed service nova] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Refreshing instance network info cache due to event network-changed-a5a64da9-f003-45ab-9b90-62488204fc29. 
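
[editor's note] The "Acquiring lock \"08ba7156-...-events\"", "No waiting events found dispatching network-vif-plugged-..." and the WARNING about an unexpected event reflect the compute manager's pattern of registering waiters for external (Neutron) events and popping them when the event arrives. A toy sketch of such a registry follows; names and structure are illustrative, not Nova's.

import threading
from collections import defaultdict

class InstanceEvents:
    """Map (instance, event-tag) -> threading.Event so a spawn can block on it."""

    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)

    def prepare(self, instance_uuid, tag):
        ev = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][tag] = ev
        return ev

    def pop(self, instance_uuid, tag):
        # Called when an external event arrives; returns None if nobody waited,
        # which corresponds to the "No waiting events found dispatching ..." line.
        with self._lock:
            ev = self._waiters[instance_uuid].pop(tag, None)
        if ev is not None:
            ev.set()
        return ev

events = InstanceEvents()
waiter = events.prepare('08ba7156-1c6d-4385-939c-bdd575c7fda3',
                        'network-vif-plugged-a5a64da9-f003-45ab-9b90-62488204fc29')
events.pop('08ba7156-1c6d-4385-939c-bdd575c7fda3',
           'network-vif-plugged-a5a64da9-f003-45ab-9b90-62488204fc29')
print(waiter.is_set())   # True once the event has been dispatched
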
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1040.279036] env[68217]: DEBUG oslo_concurrency.lockutils [req-a5e42160-6677-4d09-a2e7-f3ffc260790a req-da30be2c-8782-4826-8702-439d789709ed service nova] Acquiring lock "refresh_cache-08ba7156-1c6d-4385-939c-bdd575c7fda3" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.286139] env[68217]: DEBUG nova.network.neutron [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Updating instance_info_cache with network_info: [{"id": "a5a64da9-f003-45ab-9b90-62488204fc29", "address": "fa:16:3e:4e:8f:8b", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5a64da9-f0", "ovs_interfaceid": "a5a64da9-f003-45ab-9b90-62488204fc29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.340657] env[68217]: INFO nova.compute.manager [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Detaching volume 3e056b4f-fea1-46b1-b841-37e7f391cb46 [ 1040.378709] env[68217]: INFO nova.virt.block_device [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Attempting to driver detach volume 3e056b4f-fea1-46b1-b841-37e7f391cb46 from mountpoint /dev/sdb [ 1040.378942] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Volume detach. 
Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1040.379192] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594343', 'volume_id': '3e056b4f-fea1-46b1-b841-37e7f391cb46', 'name': 'volume-3e056b4f-fea1-46b1-b841-37e7f391cb46', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'd14026b1-84dd-430e-be94-94dcb1f47473', 'attached_at': '', 'detached_at': '', 'volume_id': '3e056b4f-fea1-46b1-b841-37e7f391cb46', 'serial': '3e056b4f-fea1-46b1-b841-37e7f391cb46'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1040.380027] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d82a9b1-fd12-4474-9fa6-58b7395d0300 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.404948] env[68217]: DEBUG nova.network.neutron [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Successfully created port: 753c250a-569f-42f8-a9e7-fed02079c841 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1040.407246] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6cbaf8a-7902-4772-989f-7a026a458586 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.415284] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a4dac8f-a17f-47e3-83e4-b0e19a985564 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.436221] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2a130e-ff32-4b8d-9362-3a0010f9a788 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.454282] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] The volume has not been displaced from its original location: [datastore2] volume-3e056b4f-fea1-46b1-b841-37e7f391cb46/volume-3e056b4f-fea1-46b1-b841-37e7f391cb46.vmdk. No consolidation needed. 
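
[editor's note] The _detach_volume_vmdk entry above logs the Cinder connection_info used for the detach: driver_volume_type 'vmdk', the backing 'volume-<uuid>' name and the volume id. A small sketch of pulling the relevant fields out of that structure before asking vCenter to detach; the dict literal is an abbreviated copy of the logged data and the helper is illustrative only.

connection_info = {
    'driver_volume_type': 'vmdk',
    'data': {
        'volume': 'vm-594343',
        'volume_id': '3e056b4f-fea1-46b1-b841-37e7f391cb46',
        'name': 'volume-3e056b4f-fea1-46b1-b841-37e7f391cb46',
        'access_mode': 'rw',
        'encrypted': False,
    },
    'serial': '3e056b4f-fea1-46b1-b841-37e7f391cb46',
}

def describe_detach(info):
    data = info['data']
    # The shadow VM ("vm-594343") holds the vmdk backing; volume_id ties it
    # back to the Cinder volume being detached from /dev/sdb in the log.
    return (info['driver_volume_type'], data['volume'], data['volume_id'])

print(describe_detach(connection_info))
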
{{(pid=68217) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1040.461734] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Reconfiguring VM instance instance-0000002d to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1040.461734] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-48c87998-5f04-4bb3-bfb3-745da285d329 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.480198] env[68217]: DEBUG oslo_vmware.api [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1040.480198] env[68217]: value = "task-2961862" [ 1040.480198] env[68217]: _type = "Task" [ 1040.480198] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.488635] env[68217]: DEBUG oslo_vmware.api [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961862, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.526948] env[68217]: DEBUG nova.compute.manager [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Stashing vm_state: stopped {{(pid=68217) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1040.639145] env[68217]: DEBUG nova.scheduler.client.report [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1040.681128] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961861, 'name': ReconfigVM_Task, 'duration_secs': 0.863867} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.681428] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Reconfigured VM instance instance-00000063 to attach disk [datastore1] ffff4cf4-f663-4965-84d1-8351bfde1252/ffff4cf4-f663-4965-84d1-8351bfde1252.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1040.681719] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Updating instance 'ffff4cf4-f663-4965-84d1-8351bfde1252' progress to 50 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1040.731788] env[68217]: DEBUG nova.objects.instance [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lazy-loading 'pci_requests' on Instance uuid 35c4ab95-fc14-4bd4-a2a5-64f15f070b88 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1040.733139] env[68217]: DEBUG oslo_concurrency.lockutils [req-7a717a49-9add-4e3a-88ad-4466ec34abe3 req-45353444-d974-4d2f-83cd-f0b415b36504 service nova] Releasing lock "refresh_cache-1faf45fb-a3b0-4647-b63d-3f51695b6171" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.788762] env[68217]: DEBUG oslo_concurrency.lockutils [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "refresh_cache-08ba7156-1c6d-4385-939c-bdd575c7fda3" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1040.789140] env[68217]: DEBUG nova.compute.manager [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Instance network_info: |[{"id": "a5a64da9-f003-45ab-9b90-62488204fc29", "address": "fa:16:3e:4e:8f:8b", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5a64da9-f0", "ovs_interfaceid": "a5a64da9-f003-45ab-9b90-62488204fc29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1040.789461] env[68217]: DEBUG oslo_concurrency.lockutils [req-a5e42160-6677-4d09-a2e7-f3ffc260790a req-da30be2c-8782-4826-8702-439d789709ed service nova] Acquired lock "refresh_cache-08ba7156-1c6d-4385-939c-bdd575c7fda3" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1040.789685] env[68217]: DEBUG nova.network.neutron [req-a5e42160-6677-4d09-a2e7-f3ffc260790a req-da30be2c-8782-4826-8702-439d789709ed service nova] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Refreshing network info cache for port a5a64da9-f003-45ab-9b90-62488204fc29 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1040.792942] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4e:8f:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a5a64da9-f003-45ab-9b90-62488204fc29', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1040.803291] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1040.806691] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1040.807168] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b7de3753-15e3-4e32-86fb-e37778173cc1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.830244] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1040.830244] env[68217]: value = "task-2961863" [ 1040.830244] env[68217]: _type = "Task" [ 1040.830244] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.838087] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961863, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.911981] env[68217]: DEBUG nova.compute.manager [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Start spawning the instance on the hypervisor. 
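
[editor's note] The network_info blobs logged above (ports 0ba43f81-... and a5a64da9-...) carry, per VIF, the MAC address, the OVS interface id, the NSX logical-switch id and the subnets with fixed and floating IPs; the vmwareapi driver then condenses them into the "Instance VIF info" list with an OpaqueNetwork network_ref. A trimmed-down sketch of extracting the useful fields from such a structure; the dict below is an abbreviated copy of the logged cache entry and the loop is illustrative, not the driver's code.

network_info = [{
    "id": "a5a64da9-f003-45ab-9b90-62488204fc29",
    "address": "fa:16:3e:4e:8f:8b",
    "network": {
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.8", "type": "fixed",
                     "floating_ips": []}],
        }],
    },
    "details": {"nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379"},
    "devname": "tapa5a64da9-f0",
}]

for vif in network_info:
    fixed = [ip["address"]
             for subnet in vif["network"]["subnets"]
             for ip in subnet["ips"] if ip["type"] == "fixed"]
    # Roughly the fields a driver needs to build a per-VIF entry.
    print(vif["id"], vif["address"], fixed,
          vif["details"]["nsx-logical-switch-id"], vif["devname"])
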
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1040.948486] env[68217]: DEBUG nova.virt.hardware [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1040.948742] env[68217]: DEBUG nova.virt.hardware [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1040.948898] env[68217]: DEBUG nova.virt.hardware [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1040.949139] env[68217]: DEBUG nova.virt.hardware [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1040.949306] env[68217]: DEBUG nova.virt.hardware [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1040.949455] env[68217]: DEBUG nova.virt.hardware [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1040.949658] env[68217]: DEBUG nova.virt.hardware [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1040.949814] env[68217]: DEBUG nova.virt.hardware [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1040.949978] env[68217]: DEBUG nova.virt.hardware [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1040.950168] env[68217]: DEBUG nova.virt.hardware [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1040.950337] env[68217]: DEBUG nova.virt.hardware [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1040.951225] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c30740-8267-4b7f-8335-4aeb1ebdf54c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.959706] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e81be92a-b4fd-403b-ad70-2d236b354011 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.989227] env[68217]: DEBUG oslo_vmware.api [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961862, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.037995] env[68217]: DEBUG nova.network.neutron [req-a5e42160-6677-4d09-a2e7-f3ffc260790a req-da30be2c-8782-4826-8702-439d789709ed service nova] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Updated VIF entry in instance network info cache for port a5a64da9-f003-45ab-9b90-62488204fc29. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1041.038443] env[68217]: DEBUG nova.network.neutron [req-a5e42160-6677-4d09-a2e7-f3ffc260790a req-da30be2c-8782-4826-8702-439d789709ed service nova] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Updating instance_info_cache with network_info: [{"id": "a5a64da9-f003-45ab-9b90-62488204fc29", "address": "fa:16:3e:4e:8f:8b", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa5a64da9-f0", "ovs_interfaceid": "a5a64da9-f003-45ab-9b90-62488204fc29", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1041.052275] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1041.144442] env[68217]: DEBUG oslo_concurrency.lockutils [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.845s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.146793] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.095s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1041.165784] env[68217]: INFO nova.scheduler.client.report [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleted allocations for instance df4c3a34-2dea-4f82-9ea6-7a9eb1c03179 [ 1041.188786] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50eb0d4c-3d22-4626-b90e-c3ef2df98566 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.208265] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-48319017-0ad5-490e-8ad3-8a5d14c8956c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.226125] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Updating instance 'ffff4cf4-f663-4965-84d1-8351bfde1252' progress to 67 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1041.234897] env[68217]: DEBUG nova.objects.base [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Object Instance<35c4ab95-fc14-4bd4-a2a5-64f15f070b88> lazy-loaded attributes: flavor,pci_requests {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1041.235115] env[68217]: DEBUG nova.network.neutron [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1041.298485] env[68217]: DEBUG nova.policy [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9172578aec2742bb9aafc58752b926c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef7e30ed571740f3b3ea6b24fc9c6e20', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1041.339512] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961863, 'name': CreateVM_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.490526] env[68217]: DEBUG oslo_vmware.api [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961862, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.541339] env[68217]: DEBUG oslo_concurrency.lockutils [req-a5e42160-6677-4d09-a2e7-f3ffc260790a req-da30be2c-8782-4826-8702-439d789709ed service nova] Releasing lock "refresh_cache-08ba7156-1c6d-4385-939c-bdd575c7fda3" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1041.651837] env[68217]: INFO nova.compute.claims [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1041.676033] env[68217]: DEBUG oslo_concurrency.lockutils [None req-19f7f4d0-dfd9-4ff7-930c-326c32520377 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "df4c3a34-2dea-4f82-9ea6-7a9eb1c03179" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.137s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1041.767592] env[68217]: DEBUG nova.network.neutron [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Port a112e6da-1e76-4618-b45e-229cbb5c0ebd binding to destination host cpu-1 is already ACTIVE {{(pid=68217) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1041.840234] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961863, 'name': CreateVM_Task, 'duration_secs': 0.989147} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.840234] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1041.840713] env[68217]: DEBUG oslo_concurrency.lockutils [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1041.840879] env[68217]: DEBUG oslo_concurrency.lockutils [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1041.841213] env[68217]: DEBUG oslo_concurrency.lockutils [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1041.841469] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9ddd156d-4645-4cdd-b892-c0d3f12fdb6b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.845993] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1041.845993] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]528fa569-760d-4ca1-1e6e-d43724218e95" [ 1041.845993] env[68217]: _type = "Task" [ 1041.845993] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.853432] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528fa569-760d-4ca1-1e6e-d43724218e95, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.990579] env[68217]: DEBUG oslo_vmware.api [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961862, 'name': ReconfigVM_Task, 'duration_secs': 1.240604} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.994017] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Reconfigured VM instance instance-0000002d to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1041.995699] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f60c30cd-e4da-4b29-8459-fce5716eb472 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.010789] env[68217]: DEBUG oslo_vmware.api [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1042.010789] env[68217]: value = "task-2961864" [ 1042.010789] env[68217]: _type = "Task" [ 1042.010789] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.021061] env[68217]: DEBUG oslo_vmware.api [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961864, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.099305] env[68217]: DEBUG nova.network.neutron [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Successfully updated port: 753c250a-569f-42f8-a9e7-fed02079c841 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1042.157441] env[68217]: INFO nova.compute.resource_tracker [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updating resource usage from migration fd057df1-4b70-4d8f-8028-ac2b12d1f0b0 [ 1042.310854] env[68217]: DEBUG nova.compute.manager [req-f32b74c7-dc66-4608-a6ce-ebfea78f8bdf req-a119bccc-08c0-4360-97a8-626472a40e4e service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Received event network-vif-plugged-753c250a-569f-42f8-a9e7-fed02079c841 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1042.311135] env[68217]: DEBUG oslo_concurrency.lockutils [req-f32b74c7-dc66-4608-a6ce-ebfea78f8bdf req-a119bccc-08c0-4360-97a8-626472a40e4e service nova] Acquiring lock "6b4dff91-254e-43cc-85cf-7de6214dcafd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.311328] env[68217]: DEBUG oslo_concurrency.lockutils [req-f32b74c7-dc66-4608-a6ce-ebfea78f8bdf req-a119bccc-08c0-4360-97a8-626472a40e4e service nova] Lock "6b4dff91-254e-43cc-85cf-7de6214dcafd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.311447] 
env[68217]: DEBUG oslo_concurrency.lockutils [req-f32b74c7-dc66-4608-a6ce-ebfea78f8bdf req-a119bccc-08c0-4360-97a8-626472a40e4e service nova] Lock "6b4dff91-254e-43cc-85cf-7de6214dcafd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.311659] env[68217]: DEBUG nova.compute.manager [req-f32b74c7-dc66-4608-a6ce-ebfea78f8bdf req-a119bccc-08c0-4360-97a8-626472a40e4e service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] No waiting events found dispatching network-vif-plugged-753c250a-569f-42f8-a9e7-fed02079c841 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1042.311907] env[68217]: WARNING nova.compute.manager [req-f32b74c7-dc66-4608-a6ce-ebfea78f8bdf req-a119bccc-08c0-4360-97a8-626472a40e4e service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Received unexpected event network-vif-plugged-753c250a-569f-42f8-a9e7-fed02079c841 for instance with vm_state building and task_state spawning. [ 1042.312218] env[68217]: DEBUG nova.compute.manager [req-f32b74c7-dc66-4608-a6ce-ebfea78f8bdf req-a119bccc-08c0-4360-97a8-626472a40e4e service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Received event network-changed-753c250a-569f-42f8-a9e7-fed02079c841 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1042.312431] env[68217]: DEBUG nova.compute.manager [req-f32b74c7-dc66-4608-a6ce-ebfea78f8bdf req-a119bccc-08c0-4360-97a8-626472a40e4e service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Refreshing instance network info cache due to event network-changed-753c250a-569f-42f8-a9e7-fed02079c841. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1042.312613] env[68217]: DEBUG oslo_concurrency.lockutils [req-f32b74c7-dc66-4608-a6ce-ebfea78f8bdf req-a119bccc-08c0-4360-97a8-626472a40e4e service nova] Acquiring lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.312766] env[68217]: DEBUG oslo_concurrency.lockutils [req-f32b74c7-dc66-4608-a6ce-ebfea78f8bdf req-a119bccc-08c0-4360-97a8-626472a40e4e service nova] Acquired lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1042.312924] env[68217]: DEBUG nova.network.neutron [req-f32b74c7-dc66-4608-a6ce-ebfea78f8bdf req-a119bccc-08c0-4360-97a8-626472a40e4e service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Refreshing network info cache for port 753c250a-569f-42f8-a9e7-fed02079c841 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1042.359525] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528fa569-760d-4ca1-1e6e-d43724218e95, 'name': SearchDatastore_Task, 'duration_secs': 0.010167} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.359856] env[68217]: DEBUG oslo_concurrency.lockutils [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1042.360183] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1042.360332] env[68217]: DEBUG oslo_concurrency.lockutils [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.360478] env[68217]: DEBUG oslo_concurrency.lockutils [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1042.360653] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1042.360918] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-09ffde2c-9459-41e3-8de4-67c5e54ec3b4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.372292] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1042.372472] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1042.373285] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30df800f-8131-40bf-8688-624e47c3f3f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.381147] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1042.381147] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52be362d-3e8d-a3a6-0ee0-0a8bcba62df8" [ 1042.381147] env[68217]: _type = "Task" [ 1042.381147] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.388274] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52be362d-3e8d-a3a6-0ee0-0a8bcba62df8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.389670] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00a4a245-cf7e-49c4-9138-e3838fca4046 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.397070] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e129701-07d9-4135-a3f0-92811c8e56f2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.426643] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-509e972d-b67b-46ca-b84a-5f4cc9870e4c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.434639] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e7387d8-3925-4586-8f3d-3509113784c9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.449557] env[68217]: DEBUG nova.compute.provider_tree [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1042.523796] env[68217]: DEBUG oslo_vmware.api [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961864, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.601798] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.793755] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "ffff4cf4-f663-4965-84d1-8351bfde1252-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1042.793755] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "ffff4cf4-f663-4965-84d1-8351bfde1252-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1042.793755] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "ffff4cf4-f663-4965-84d1-8351bfde1252-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1042.885823] env[68217]: DEBUG nova.network.neutron [req-f32b74c7-dc66-4608-a6ce-ebfea78f8bdf req-a119bccc-08c0-4360-97a8-626472a40e4e service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1042.893354] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52be362d-3e8d-a3a6-0ee0-0a8bcba62df8, 'name': SearchDatastore_Task, 'duration_secs': 0.008098} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.894103] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-11cd4692-0cc5-4487-83cc-6e98dc367718 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.899348] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1042.899348] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d3fee1-8e84-4952-8f67-4aa2ff181c25" [ 1042.899348] env[68217]: _type = "Task" [ 1042.899348] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.906823] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d3fee1-8e84-4952-8f67-4aa2ff181c25, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.953005] env[68217]: DEBUG nova.scheduler.client.report [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1043.022745] env[68217]: DEBUG oslo_vmware.api [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961864, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.058975] env[68217]: DEBUG nova.network.neutron [req-f32b74c7-dc66-4608-a6ce-ebfea78f8bdf req-a119bccc-08c0-4360-97a8-626472a40e4e service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1043.148495] env[68217]: DEBUG nova.network.neutron [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Successfully updated port: ff379aaa-198c-405e-ae1f-591e6f4cc070 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1043.409688] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d3fee1-8e84-4952-8f67-4aa2ff181c25, 'name': SearchDatastore_Task, 'duration_secs': 0.009612} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.409951] env[68217]: DEBUG oslo_concurrency.lockutils [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.410266] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 08ba7156-1c6d-4385-939c-bdd575c7fda3/08ba7156-1c6d-4385-939c-bdd575c7fda3.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1043.410529] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ca3cf90e-e90c-4c98-8153-9756f0d6861c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.417070] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1043.417070] env[68217]: value = "task-2961865" [ 1043.417070] env[68217]: _type = "Task" [ 1043.417070] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.424590] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961865, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.459196] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.312s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1043.459472] env[68217]: INFO nova.compute.manager [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Migrating [ 1043.525339] env[68217]: DEBUG oslo_vmware.api [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961864, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.561303] env[68217]: DEBUG oslo_concurrency.lockutils [req-f32b74c7-dc66-4608-a6ce-ebfea78f8bdf req-a119bccc-08c0-4360-97a8-626472a40e4e service nova] Releasing lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1043.561761] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.561958] env[68217]: DEBUG nova.network.neutron [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1043.648531] env[68217]: DEBUG oslo_concurrency.lockutils [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.648733] env[68217]: DEBUG oslo_concurrency.lockutils [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.648913] env[68217]: DEBUG nova.network.neutron [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1043.843058] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "refresh_cache-ffff4cf4-f663-4965-84d1-8351bfde1252" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.843330] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "refresh_cache-ffff4cf4-f663-4965-84d1-8351bfde1252" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.843487] env[68217]: DEBUG nova.network.neutron [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1043.928349] env[68217]: DEBUG oslo_vmware.api [None 
req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961865, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.439701} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.928639] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 08ba7156-1c6d-4385-939c-bdd575c7fda3/08ba7156-1c6d-4385-939c-bdd575c7fda3.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1043.928840] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1043.929111] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b70f0bcc-0e2d-4ce7-a1ce-ce26cece3d85 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.935781] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1043.935781] env[68217]: value = "task-2961866" [ 1043.935781] env[68217]: _type = "Task" [ 1043.935781] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.943347] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961866, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.974206] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1043.974397] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1043.974574] env[68217]: DEBUG nova.network.neutron [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1044.024555] env[68217]: DEBUG oslo_vmware.api [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961864, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.066507] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.066881] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.095892] env[68217]: DEBUG nova.network.neutron [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1044.194142] env[68217]: WARNING nova.network.neutron [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] 246af4c9-69b4-4542-84b9-2afe67cf297a already exists in list: networks containing: ['246af4c9-69b4-4542-84b9-2afe67cf297a']. 
ignoring it [ 1044.194365] env[68217]: WARNING nova.network.neutron [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] 246af4c9-69b4-4542-84b9-2afe67cf297a already exists in list: networks containing: ['246af4c9-69b4-4542-84b9-2afe67cf297a']. ignoring it [ 1044.337306] env[68217]: DEBUG nova.network.neutron [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Updating instance_info_cache with network_info: [{"id": "753c250a-569f-42f8-a9e7-fed02079c841", "address": "fa:16:3e:ee:87:7a", "network": {"id": "8ad06dc6-c950-487f-b4ea-fe9364f8548d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-897727038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1424003d74424a9e84d15879f2e634e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap753c250a-56", "ovs_interfaceid": "753c250a-569f-42f8-a9e7-fed02079c841", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.357834] env[68217]: DEBUG nova.compute.manager [req-e54ea808-1a84-4075-a925-6930fc8066bf req-3d29dc71-f386-45a7-8a57-37dfb05ed2eb service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Received event network-vif-plugged-ff379aaa-198c-405e-ae1f-591e6f4cc070 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1044.358058] env[68217]: DEBUG oslo_concurrency.lockutils [req-e54ea808-1a84-4075-a925-6930fc8066bf req-3d29dc71-f386-45a7-8a57-37dfb05ed2eb service nova] Acquiring lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1044.358262] env[68217]: DEBUG oslo_concurrency.lockutils [req-e54ea808-1a84-4075-a925-6930fc8066bf req-3d29dc71-f386-45a7-8a57-37dfb05ed2eb service nova] Lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1044.358427] env[68217]: DEBUG oslo_concurrency.lockutils [req-e54ea808-1a84-4075-a925-6930fc8066bf req-3d29dc71-f386-45a7-8a57-37dfb05ed2eb service nova] Lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1044.358589] env[68217]: DEBUG nova.compute.manager [req-e54ea808-1a84-4075-a925-6930fc8066bf req-3d29dc71-f386-45a7-8a57-37dfb05ed2eb service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] No waiting events found dispatching network-vif-plugged-ff379aaa-198c-405e-ae1f-591e6f4cc070 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1044.358746] env[68217]: WARNING nova.compute.manager [req-e54ea808-1a84-4075-a925-6930fc8066bf req-3d29dc71-f386-45a7-8a57-37dfb05ed2eb service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Received unexpected event network-vif-plugged-ff379aaa-198c-405e-ae1f-591e6f4cc070 for instance with vm_state active and task_state None. [ 1044.359218] env[68217]: DEBUG nova.compute.manager [req-e54ea808-1a84-4075-a925-6930fc8066bf req-3d29dc71-f386-45a7-8a57-37dfb05ed2eb service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Received event network-changed-ff379aaa-198c-405e-ae1f-591e6f4cc070 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1044.359218] env[68217]: DEBUG nova.compute.manager [req-e54ea808-1a84-4075-a925-6930fc8066bf req-3d29dc71-f386-45a7-8a57-37dfb05ed2eb service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Refreshing instance network info cache due to event network-changed-ff379aaa-198c-405e-ae1f-591e6f4cc070. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1044.359348] env[68217]: DEBUG oslo_concurrency.lockutils [req-e54ea808-1a84-4075-a925-6930fc8066bf req-3d29dc71-f386-45a7-8a57-37dfb05ed2eb service nova] Acquiring lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1044.447244] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961866, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.144801} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.447515] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1044.448291] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9139b27c-98d5-4f8f-9592-063488683207 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.470797] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Reconfiguring VM instance instance-00000066 to attach disk [datastore2] 08ba7156-1c6d-4385-939c-bdd575c7fda3/08ba7156-1c6d-4385-939c-bdd575c7fda3.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1044.474745] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-264ec61b-ddb5-4e8b-b3eb-7f5f28b82726 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.497338] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1044.497338] env[68217]: value = "task-2961867" [ 1044.497338] env[68217]: _type = "Task" [ 1044.497338] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.505516] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961867, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.522704] env[68217]: DEBUG oslo_vmware.api [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961864, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.569229] env[68217]: DEBUG nova.compute.manager [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1044.833027] env[68217]: DEBUG nova.network.neutron [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Updating instance_info_cache with network_info: [{"id": "a112e6da-1e76-4618-b45e-229cbb5c0ebd", "address": "fa:16:3e:43:19:4b", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa112e6da-1e", "ovs_interfaceid": "a112e6da-1e76-4618-b45e-229cbb5c0ebd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1044.840037] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Releasing lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1044.840324] env[68217]: DEBUG nova.compute.manager [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Instance network_info: |[{"id": "753c250a-569f-42f8-a9e7-fed02079c841", "address": "fa:16:3e:ee:87:7a", "network": {"id": "8ad06dc6-c950-487f-b4ea-fe9364f8548d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-897727038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1424003d74424a9e84d15879f2e634e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap753c250a-56", "ovs_interfaceid": "753c250a-569f-42f8-a9e7-fed02079c841", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1044.840707] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:87:7a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0e00b2f1-c70f-4b21-86eb-810643cc1680', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '753c250a-569f-42f8-a9e7-fed02079c841', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1044.848143] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1044.848885] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1044.849135] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ebca9cd8-5661-40d4-8166-a18b995b0a4a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.882247] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1044.882247] env[68217]: value = "task-2961868" [ 1044.882247] env[68217]: _type = "Task" [ 1044.882247] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.890073] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961868, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.008067] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961867, 'name': ReconfigVM_Task, 'duration_secs': 0.44443} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.011195] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Reconfigured VM instance instance-00000066 to attach disk [datastore2] 08ba7156-1c6d-4385-939c-bdd575c7fda3/08ba7156-1c6d-4385-939c-bdd575c7fda3.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1045.011802] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8fd91569-b964-4772-9f62-354d2c897879 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.019060] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1045.019060] env[68217]: value = "task-2961869" [ 1045.019060] env[68217]: _type = "Task" [ 1045.019060] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.025522] env[68217]: DEBUG oslo_vmware.api [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961864, 'name': ReconfigVM_Task, 'duration_secs': 2.762179} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.026159] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594343', 'volume_id': '3e056b4f-fea1-46b1-b841-37e7f391cb46', 'name': 'volume-3e056b4f-fea1-46b1-b841-37e7f391cb46', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attached', 'instance': 'd14026b1-84dd-430e-be94-94dcb1f47473', 'attached_at': '', 'detached_at': '', 'volume_id': '3e056b4f-fea1-46b1-b841-37e7f391cb46', 'serial': '3e056b4f-fea1-46b1-b841-37e7f391cb46'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1045.031689] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961869, 'name': Rename_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.040376] env[68217]: DEBUG nova.network.neutron [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updating instance_info_cache with network_info: [{"id": "772726db-4382-4051-9a7d-abfc670d5c9b", "address": "fa:16:3e:49:ce:f8", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap772726db-43", "ovs_interfaceid": "772726db-4382-4051-9a7d-abfc670d5c9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.094666] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1045.095046] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1045.096594] env[68217]: INFO nova.compute.claims [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1045.100576] env[68217]: DEBUG nova.network.neutron [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Updating instance_info_cache with network_info: [{"id": "05c67562-5b0b-421a-a707-1d10d90f4a71", "address": "fa:16:3e:f2:62:b9", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", 
"version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05c67562-5b", "ovs_interfaceid": "05c67562-5b0b-421a-a707-1d10d90f4a71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bd03a7d5-905c-4c0d-9972-f46ba5682fd7", "address": "fa:16:3e:71:68:79", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd03a7d5-90", "ovs_interfaceid": "bd03a7d5-905c-4c0d-9972-f46ba5682fd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ff379aaa-198c-405e-ae1f-591e6f4cc070", "address": "fa:16:3e:61:77:85", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff379aaa-19", "ovs_interfaceid": "ff379aaa-198c-405e-ae1f-591e6f4cc070", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.335850] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 
tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "refresh_cache-ffff4cf4-f663-4965-84d1-8351bfde1252" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.393015] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961868, 'name': CreateVM_Task, 'duration_secs': 0.381642} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.393193] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1045.393877] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.394055] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.394381] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1045.394626] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be92b8fe-216b-48ad-846e-300cc533eeab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.399150] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1045.399150] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fba900-b13b-daa3-874d-317c563dda42" [ 1045.399150] env[68217]: _type = "Task" [ 1045.399150] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.407120] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fba900-b13b-daa3-874d-317c563dda42, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.529776] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961869, 'name': Rename_Task, 'duration_secs': 0.222801} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.530029] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1045.530267] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0f675175-8911-4c07-b17f-843d9ef7b325 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.536690] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1045.536690] env[68217]: value = "task-2961870" [ 1045.536690] env[68217]: _type = "Task" [ 1045.536690] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.543745] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.544991] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961870, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.581404] env[68217]: DEBUG nova.objects.instance [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lazy-loading 'flavor' on Instance uuid d14026b1-84dd-430e-be94-94dcb1f47473 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1045.604345] env[68217]: DEBUG oslo_concurrency.lockutils [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.604944] env[68217]: DEBUG oslo_concurrency.lockutils [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.605118] env[68217]: DEBUG oslo_concurrency.lockutils [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.605762] env[68217]: DEBUG oslo_concurrency.lockutils [req-e54ea808-1a84-4075-a925-6930fc8066bf req-3d29dc71-f386-45a7-8a57-37dfb05ed2eb service nova] Acquired lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.605944] env[68217]: DEBUG nova.network.neutron [req-e54ea808-1a84-4075-a925-6930fc8066bf req-3d29dc71-f386-45a7-8a57-37dfb05ed2eb service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Refreshing network info cache for port ff379aaa-198c-405e-ae1f-591e6f4cc070 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1045.607625] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d47732-e176-4408-827e-57ccc171eccc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.626115] env[68217]: DEBUG nova.virt.hardware [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1045.627027] env[68217]: DEBUG nova.virt.hardware [None req-307dbe02-dc26-41ef-9425-671d0c398d83 
tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1045.627027] env[68217]: DEBUG nova.virt.hardware [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1045.627027] env[68217]: DEBUG nova.virt.hardware [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1045.627027] env[68217]: DEBUG nova.virt.hardware [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1045.627027] env[68217]: DEBUG nova.virt.hardware [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1045.627305] env[68217]: DEBUG nova.virt.hardware [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1045.627375] env[68217]: DEBUG nova.virt.hardware [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1045.627604] env[68217]: DEBUG nova.virt.hardware [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1045.627784] env[68217]: DEBUG nova.virt.hardware [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1045.627957] env[68217]: DEBUG nova.virt.hardware [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1045.635375] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 
35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Reconfiguring VM to attach interface {{(pid=68217) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1045.635975] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76d156f3-f9b1-4747-b96e-33c3251b26c7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.654523] env[68217]: DEBUG oslo_vmware.api [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1045.654523] env[68217]: value = "task-2961871" [ 1045.654523] env[68217]: _type = "Task" [ 1045.654523] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.662836] env[68217]: DEBUG oslo_vmware.api [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961871, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.859914] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ece0079-3986-4915-b30b-de82abd5ec98 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.880299] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f170f07b-4895-45ca-b9ba-7868187f8b7a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.888245] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Updating instance 'ffff4cf4-f663-4965-84d1-8351bfde1252' progress to 83 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1045.909496] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fba900-b13b-daa3-874d-317c563dda42, 'name': SearchDatastore_Task, 'duration_secs': 0.009541} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.909900] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1045.910054] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1045.910345] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.910496] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1045.910700] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1045.910985] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0f3202e-8f6a-4df8-aa40-eea5e4e711c7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.919809] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1045.919997] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1045.920763] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3bec879-ba51-41b7-81bc-c53952a319cb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.926527] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1045.926527] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527263d4-602d-ec9c-bc5b-aa285a13be7c" [ 1045.926527] env[68217]: _type = "Task" [ 1045.926527] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.934408] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527263d4-602d-ec9c-bc5b-aa285a13be7c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.050856] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961870, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.166737] env[68217]: DEBUG oslo_vmware.api [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961871, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.340069] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6afce74-38b7-4f6c-89b6-3d4d0a6da524 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.347603] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f334cf-7400-4663-8922-931a5956a959 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.378597] env[68217]: DEBUG nova.network.neutron [req-e54ea808-1a84-4075-a925-6930fc8066bf req-3d29dc71-f386-45a7-8a57-37dfb05ed2eb service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Updated VIF entry in instance network info cache for port ff379aaa-198c-405e-ae1f-591e6f4cc070. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1046.379059] env[68217]: DEBUG nova.network.neutron [req-e54ea808-1a84-4075-a925-6930fc8066bf req-3d29dc71-f386-45a7-8a57-37dfb05ed2eb service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Updating instance_info_cache with network_info: [{"id": "05c67562-5b0b-421a-a707-1d10d90f4a71", "address": "fa:16:3e:f2:62:b9", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05c67562-5b", "ovs_interfaceid": "05c67562-5b0b-421a-a707-1d10d90f4a71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "bd03a7d5-905c-4c0d-9972-f46ba5682fd7", "address": "fa:16:3e:71:68:79", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd03a7d5-90", "ovs_interfaceid": "bd03a7d5-905c-4c0d-9972-f46ba5682fd7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ff379aaa-198c-405e-ae1f-591e6f4cc070", "address": "fa:16:3e:61:77:85", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff379aaa-19", "ovs_interfaceid": "ff379aaa-198c-405e-ae1f-591e6f4cc070", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.380648] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab0fa617-d089-4287-b176-6f1ab8984e81 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.391185] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-396d7b46-00ce-45ef-96d8-34c0d6dd183e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.396558] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1046.397212] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ae1f608c-1dae-458b-9797-a180e5d5b772 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.408969] env[68217]: DEBUG nova.compute.provider_tree [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1046.411126] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 1046.411126] env[68217]: value = "task-2961872" [ 1046.411126] env[68217]: _type = "Task" [ 1046.411126] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.418948] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961872, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.436856] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527263d4-602d-ec9c-bc5b-aa285a13be7c, 'name': SearchDatastore_Task, 'duration_secs': 0.0182} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.438048] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2267fa00-ca7f-475a-8b34-3f1a8ea6acd9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.442802] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1046.442802] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d6d472-dfd9-9796-5939-de1714cce3ec" [ 1046.442802] env[68217]: _type = "Task" [ 1046.442802] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.449688] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d6d472-dfd9-9796-5939-de1714cce3ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.547105] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961870, 'name': PowerOnVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.589445] env[68217]: DEBUG oslo_concurrency.lockutils [None req-20073242-d637-499e-8c26-1397839508e6 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "d14026b1-84dd-430e-be94-94dcb1f47473" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 6.754s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.665091] env[68217]: DEBUG oslo_vmware.api [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961871, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.886281] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "d14026b1-84dd-430e-be94-94dcb1f47473" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.886722] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "d14026b1-84dd-430e-be94-94dcb1f47473" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.886957] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "d14026b1-84dd-430e-be94-94dcb1f47473-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1046.887363] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "d14026b1-84dd-430e-be94-94dcb1f47473-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1046.887551] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "d14026b1-84dd-430e-be94-94dcb1f47473-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1046.889486] env[68217]: DEBUG oslo_concurrency.lockutils [req-e54ea808-1a84-4075-a925-6930fc8066bf req-3d29dc71-f386-45a7-8a57-37dfb05ed2eb service nova] Releasing lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.890179] env[68217]: INFO nova.compute.manager [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Terminating instance [ 1046.912888] env[68217]: DEBUG nova.scheduler.client.report [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1046.925983] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961872, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.953302] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d6d472-dfd9-9796-5939-de1714cce3ec, 'name': SearchDatastore_Task, 'duration_secs': 0.010136} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.953509] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1046.953766] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 6b4dff91-254e-43cc-85cf-7de6214dcafd/6b4dff91-254e-43cc-85cf-7de6214dcafd.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1046.954031] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2be093aa-2182-4eaf-b4be-dfae0f8414c4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.961806] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1046.961806] env[68217]: value = "task-2961873" [ 1046.961806] env[68217]: _type = "Task" [ 1046.961806] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1046.970129] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961873, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.048899] env[68217]: DEBUG oslo_vmware.api [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961870, 'name': PowerOnVM_Task, 'duration_secs': 1.481748} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.049158] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1047.049381] env[68217]: INFO nova.compute.manager [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Took 9.05 seconds to spawn the instance on the hypervisor. [ 1047.049552] env[68217]: DEBUG nova.compute.manager [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1047.050365] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a835178c-f9ef-487c-9eff-c010ec6d6735 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.063961] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb806c5-1b4b-424c-8cd0-d02e0ace91f7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.083959] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updating instance '04149a5c-d1b5-4d71-a1ca-44696506a40d' progress to 0 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1047.167803] env[68217]: DEBUG oslo_vmware.api [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961871, 'name': ReconfigVM_Task, 'duration_secs': 1.368069} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.168382] env[68217]: DEBUG oslo_concurrency.lockutils [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1047.168604] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Reconfigured VM to attach interface {{(pid=68217) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1047.394492] env[68217]: DEBUG nova.compute.manager [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1047.394745] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1047.395648] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55b9206d-cbe2-40fc-8773-312291f2c7e8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.404242] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1047.404556] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f0b68b32-e7f6-446d-94a9-260302c70b7d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.413066] env[68217]: DEBUG oslo_vmware.api [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1047.413066] env[68217]: value = "task-2961874" [ 1047.413066] env[68217]: _type = "Task" [ 1047.413066] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.421947] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.327s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.422565] env[68217]: DEBUG nova.compute.manager [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1047.433889] env[68217]: DEBUG oslo_vmware.api [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961874, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.434255] env[68217]: DEBUG oslo_vmware.api [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961872, 'name': PowerOnVM_Task, 'duration_secs': 0.897562} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.434520] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1047.434752] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d4f2efc2-c6b9-4bcd-9ec6-7a875d240e61 tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Updating instance 'ffff4cf4-f663-4965-84d1-8351bfde1252' progress to 100 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1047.473469] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961873, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503767} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.473777] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 6b4dff91-254e-43cc-85cf-7de6214dcafd/6b4dff91-254e-43cc-85cf-7de6214dcafd.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1047.474061] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1047.474341] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e196e694-8504-44f2-ac02-be472f730727 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.481119] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1047.481119] env[68217]: value = "task-2961875" [ 1047.481119] env[68217]: _type = "Task" [ 1047.481119] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.489124] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961875, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.570197] env[68217]: INFO nova.compute.manager [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Took 20.87 seconds to build instance. [ 1047.590164] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1047.590705] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-819cc2ee-d6c8-4b6b-9eeb-d77f13c2ecd2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.598057] env[68217]: DEBUG oslo_vmware.api [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1047.598057] env[68217]: value = "task-2961876" [ 1047.598057] env[68217]: _type = "Task" [ 1047.598057] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.606100] env[68217]: DEBUG oslo_vmware.api [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961876, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1047.674181] env[68217]: DEBUG oslo_concurrency.lockutils [None req-307dbe02-dc26-41ef-9425-671d0c398d83 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-35c4ab95-fc14-4bd4-a2a5-64f15f070b88-ff379aaa-198c-405e-ae1f-591e6f4cc070" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.617s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1047.923419] env[68217]: DEBUG oslo_vmware.api [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961874, 'name': PowerOffVM_Task, 'duration_secs': 0.322431} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.923696] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1047.923854] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1047.924123] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-415167c4-6160-441c-b8d5-491529dfc24f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.935747] env[68217]: DEBUG nova.compute.utils [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1047.937776] env[68217]: DEBUG nova.compute.manager [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1047.937776] env[68217]: DEBUG nova.network.neutron [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1047.988130] env[68217]: DEBUG nova.policy [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0939a9bd52d142818e49fbf0c576e4a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd093c295105c44cca8bd67bd514429d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1047.995285] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961875, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.076489} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1047.995598] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1047.996379] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f3e0035-2822-4b5f-995f-2597c9f60fd1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.019118] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 6b4dff91-254e-43cc-85cf-7de6214dcafd/6b4dff91-254e-43cc-85cf-7de6214dcafd.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1048.020401] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-658ac598-c7c2-4ef5-ae40-d1f8a202a078 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.035045] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1048.035254] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1048.035429] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Deleting the datastore file [datastore1] d14026b1-84dd-430e-be94-94dcb1f47473 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1048.037766] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9e5a547e-bc03-456b-858d-046f1a191c1e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.045522] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1048.045522] env[68217]: value = "task-2961878" [ 1048.045522] env[68217]: _type = "Task" [ 1048.045522] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.045758] env[68217]: DEBUG oslo_vmware.api [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1048.045758] env[68217]: value = "task-2961879" [ 1048.045758] env[68217]: _type = "Task" [ 1048.045758] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.059741] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961878, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.060037] env[68217]: DEBUG oslo_vmware.api [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961879, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.072755] env[68217]: DEBUG oslo_concurrency.lockutils [None req-39ca67e6-d1b2-49f6-be6f-fb1e93dbd429 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "08ba7156-1c6d-4385-939c-bdd575c7fda3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.387s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.110052] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] VM already powered off {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1048.110297] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updating instance '04149a5c-d1b5-4d71-a1ca-44696506a40d' progress to 17 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1048.335450] env[68217]: DEBUG nova.network.neutron [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Successfully created port: 018f74db-1dcd-49e4-bd11-2ab20c34e986 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1048.441226] env[68217]: DEBUG nova.compute.manager [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1048.456198] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "08ba7156-1c6d-4385-939c-bdd575c7fda3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.456198] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "08ba7156-1c6d-4385-939c-bdd575c7fda3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.456198] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "08ba7156-1c6d-4385-939c-bdd575c7fda3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1048.456198] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "08ba7156-1c6d-4385-939c-bdd575c7fda3-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1048.456198] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "08ba7156-1c6d-4385-939c-bdd575c7fda3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1048.457144] env[68217]: INFO nova.compute.manager [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Terminating instance [ 1048.561483] env[68217]: DEBUG oslo_vmware.api [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961879, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144809} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.561866] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961878, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.562276] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1048.562565] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1048.562860] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1048.563535] env[68217]: INFO nova.compute.manager [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Took 1.17 seconds to destroy the instance on the hypervisor. [ 1048.563871] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1048.564191] env[68217]: DEBUG nova.compute.manager [-] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1048.565830] env[68217]: DEBUG nova.network.neutron [-] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1048.617453] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1048.617691] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1048.618039] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1048.618245] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1048.618394] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1048.618539] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1048.618742] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1048.618900] env[68217]: DEBUG 
nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1048.619080] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1048.619247] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1048.619464] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1048.627211] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-666b5dfd-59b6-4214-b7fa-0aa1e23d6cf5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.643843] env[68217]: DEBUG oslo_vmware.api [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1048.643843] env[68217]: value = "task-2961880" [ 1048.643843] env[68217]: _type = "Task" [ 1048.643843] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1048.651927] env[68217]: DEBUG oslo_vmware.api [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961880, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.968913] env[68217]: DEBUG nova.compute.manager [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1048.969253] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1048.974674] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b10942d7-1c87-4482-adc6-609fa338b9ed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.986150] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1048.986434] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc425e4b-be41-4b4c-938a-c46ca84f5a01 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.994546] env[68217]: DEBUG oslo_vmware.api [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1048.994546] env[68217]: value = "task-2961881" [ 1048.994546] env[68217]: _type = "Task" [ 1048.994546] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.004208] env[68217]: DEBUG oslo_vmware.api [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961881, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.059580] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961878, 'name': ReconfigVM_Task, 'duration_secs': 0.826052} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.059884] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 6b4dff91-254e-43cc-85cf-7de6214dcafd/6b4dff91-254e-43cc-85cf-7de6214dcafd.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1049.060569] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-62d86120-4567-4a6b-9a4e-eefa6312f628 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.067045] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1049.067045] env[68217]: value = "task-2961882" [ 1049.067045] env[68217]: _type = "Task" [ 1049.067045] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.075681] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961882, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.156742] env[68217]: DEBUG oslo_vmware.api [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961880, 'name': ReconfigVM_Task, 'duration_secs': 0.16918} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.157268] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updating instance '04149a5c-d1b5-4d71-a1ca-44696506a40d' progress to 33 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1049.471036] env[68217]: DEBUG nova.compute.manager [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1049.506633] env[68217]: DEBUG oslo_vmware.api [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961881, 'name': PowerOffVM_Task, 'duration_secs': 0.242502} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.508940] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1049.509133] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1049.511137] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f6edbfd9-fb5f-4103-a02f-0fef7a01fbc8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.521679] env[68217]: DEBUG nova.virt.hardware [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1049.521918] env[68217]: DEBUG nova.virt.hardware [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1049.522078] env[68217]: DEBUG nova.virt.hardware [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1049.522267] env[68217]: DEBUG nova.virt.hardware [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1049.524665] env[68217]: DEBUG nova.virt.hardware [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1049.524925] env[68217]: DEBUG nova.virt.hardware [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 
tempest-ServerActionsTestOtherA-393099776-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1049.525171] env[68217]: DEBUG nova.virt.hardware [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1049.525340] env[68217]: DEBUG nova.virt.hardware [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1049.525511] env[68217]: DEBUG nova.virt.hardware [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1049.525676] env[68217]: DEBUG nova.virt.hardware [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1049.525850] env[68217]: DEBUG nova.virt.hardware [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1049.527343] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a236b03e-fcf0-4d52-9892-2131f83a35d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.535605] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c8e599-8d68-49d9-ac2e-b87dbc10a063 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.574425] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1049.574425] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1049.574425] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleting the datastore file [datastore2] 08ba7156-1c6d-4385-939c-bdd575c7fda3 {{(pid=68217) 
file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1049.574425] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d0e95f3-4e1b-44cf-be31-62885853a7ba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.580733] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961882, 'name': Rename_Task, 'duration_secs': 0.157927} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1049.583452] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1049.584129] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-49170b06-9fdd-4a67-818f-a18180f89516 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.589189] env[68217]: DEBUG oslo_vmware.api [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1049.589189] env[68217]: value = "task-2961884" [ 1049.589189] env[68217]: _type = "Task" [ 1049.589189] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.594907] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1049.594907] env[68217]: value = "task-2961885" [ 1049.594907] env[68217]: _type = "Task" [ 1049.594907] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.602580] env[68217]: DEBUG oslo_vmware.api [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961884, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.607726] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961885, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.666084] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1049.666506] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1049.666627] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1049.666696] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1049.666843] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1049.667064] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1049.667698] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1049.667698] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1049.667698] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 
tempest-ServerActionsTestOtherB-1476378927-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1049.667943] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1049.668014] env[68217]: DEBUG nova.virt.hardware [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1049.673409] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Reconfiguring VM instance instance-00000054 to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1049.674244] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0855357d-059e-4cef-a158-cda3f2c6a202 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1049.695225] env[68217]: DEBUG oslo_vmware.api [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1049.695225] env[68217]: value = "task-2961886" [ 1049.695225] env[68217]: _type = "Task" [ 1049.695225] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1049.705767] env[68217]: DEBUG oslo_vmware.api [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961886, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1049.767054] env[68217]: DEBUG nova.compute.manager [req-59dbb372-c6c3-4419-82a7-b15a92cb0d14 req-4b54a3e5-56c3-409b-9fea-fbca067f092b service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Received event network-vif-deleted-c4ffafa7-b375-4f41-90e8-0db42f248139 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1049.767341] env[68217]: INFO nova.compute.manager [req-59dbb372-c6c3-4419-82a7-b15a92cb0d14 req-4b54a3e5-56c3-409b-9fea-fbca067f092b service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Neutron deleted interface c4ffafa7-b375-4f41-90e8-0db42f248139; detaching it from the instance and deleting it from the info cache [ 1049.767473] env[68217]: DEBUG nova.network.neutron [req-59dbb372-c6c3-4419-82a7-b15a92cb0d14 req-4b54a3e5-56c3-409b-9fea-fbca067f092b service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.932012] env[68217]: DEBUG oslo_concurrency.lockutils [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "ffff4cf4-f663-4965-84d1-8351bfde1252" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.932012] env[68217]: DEBUG oslo_concurrency.lockutils [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "ffff4cf4-f663-4965-84d1-8351bfde1252" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1049.932012] env[68217]: DEBUG nova.compute.manager [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Going to confirm migration 5 {{(pid=68217) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1049.978604] env[68217]: DEBUG oslo_concurrency.lockutils [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "interface-35c4ab95-fc14-4bd4-a2a5-64f15f070b88-bd03a7d5-905c-4c0d-9972-f46ba5682fd7" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1049.979122] env[68217]: DEBUG oslo_concurrency.lockutils [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-35c4ab95-fc14-4bd4-a2a5-64f15f070b88-bd03a7d5-905c-4c0d-9972-f46ba5682fd7" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.017869] env[68217]: DEBUG nova.network.neutron [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 
tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Successfully updated port: 018f74db-1dcd-49e4-bd11-2ab20c34e986 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1050.084606] env[68217]: DEBUG nova.network.neutron [-] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1050.104288] env[68217]: DEBUG oslo_vmware.api [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961884, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.231119} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.104604] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1050.104801] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1050.105010] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1050.105387] env[68217]: INFO nova.compute.manager [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1050.105592] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1050.105624] env[68217]: DEBUG nova.compute.manager [-] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1050.105692] env[68217]: DEBUG nova.network.neutron [-] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1050.110525] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961885, 'name': PowerOnVM_Task} progress is 88%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.217026] env[68217]: DEBUG oslo_vmware.api [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961886, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.270786] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-089b402e-3d35-4d3e-b32e-5248a004fe97 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.281682] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd91d5e-748f-42ef-8e48-905d5137f018 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.315193] env[68217]: DEBUG nova.compute.manager [req-59dbb372-c6c3-4419-82a7-b15a92cb0d14 req-4b54a3e5-56c3-409b-9fea-fbca067f092b service nova] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Detach interface failed, port_id=c4ffafa7-b375-4f41-90e8-0db42f248139, reason: Instance d14026b1-84dd-430e-be94-94dcb1f47473 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1050.354172] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.354325] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.354515] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1050.354702] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1050.354882] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1050.357066] env[68217]: INFO nova.compute.manager [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Terminating instance [ 1050.468708] env[68217]: DEBUG oslo_concurrency.lockutils [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "refresh_cache-ffff4cf4-f663-4965-84d1-8351bfde1252" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.468898] env[68217]: DEBUG oslo_concurrency.lockutils [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquired lock "refresh_cache-ffff4cf4-f663-4965-84d1-8351bfde1252" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1050.469092] env[68217]: DEBUG nova.network.neutron [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1050.469282] env[68217]: DEBUG nova.objects.instance [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lazy-loading 'info_cache' on Instance uuid ffff4cf4-f663-4965-84d1-8351bfde1252 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.481379] env[68217]: DEBUG oslo_concurrency.lockutils [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.481548] env[68217]: DEBUG oslo_concurrency.lockutils [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1050.482402] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c5e630-aaf1-4b62-ae65-e82625a094ab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.500811] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3546cfa3-f1e2-4e0e-bf3e-768f39e4aee1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.523379] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "refresh_cache-a4dcc7fb-83e4-4bb9-9c98-9569daee1435" {{(pid=68217) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1050.523434] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "refresh_cache-a4dcc7fb-83e4-4bb9-9c98-9569daee1435" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1050.523567] env[68217]: DEBUG nova.network.neutron [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1050.531286] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Reconfiguring VM to detach interface {{(pid=68217) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1050.532147] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9dfdebc-2989-4b8b-b40a-48c5b2c2a194 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.550180] env[68217]: DEBUG oslo_vmware.api [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1050.550180] env[68217]: value = "task-2961887" [ 1050.550180] env[68217]: _type = "Task" [ 1050.550180] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.558502] env[68217]: DEBUG oslo_vmware.api [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961887, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.588143] env[68217]: INFO nova.compute.manager [-] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Took 2.02 seconds to deallocate network for instance. [ 1050.605936] env[68217]: DEBUG oslo_vmware.api [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961885, 'name': PowerOnVM_Task, 'duration_secs': 0.855238} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1050.606212] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1050.606412] env[68217]: INFO nova.compute.manager [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Took 9.69 seconds to spawn the instance on the hypervisor. [ 1050.606591] env[68217]: DEBUG nova.compute.manager [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1050.607390] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4bd39c-54a3-4ec7-aace-568f5f0aa2e9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.707610] env[68217]: DEBUG oslo_vmware.api [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961886, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.861551] env[68217]: DEBUG nova.compute.manager [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1050.861773] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1050.862710] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73745aed-91d9-4944-bfcf-5bb954045e7a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.870074] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1050.870307] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb6c9398-410e-447c-8023-ed1fec3b9012 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.876239] env[68217]: DEBUG oslo_vmware.api [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1050.876239] env[68217]: value = "task-2961888" [ 1050.876239] env[68217]: _type = "Task" [ 1050.876239] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.886688] env[68217]: DEBUG oslo_vmware.api [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961888, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1050.915749] env[68217]: DEBUG nova.network.neutron [-] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.055484] env[68217]: DEBUG nova.network.neutron [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1051.063947] env[68217]: DEBUG oslo_vmware.api [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961887, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.095944] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.096308] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.096553] env[68217]: DEBUG nova.objects.instance [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lazy-loading 'resources' on Instance uuid d14026b1-84dd-430e-be94-94dcb1f47473 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1051.125920] env[68217]: INFO nova.compute.manager [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Took 15.79 seconds to build instance. [ 1051.209467] env[68217]: DEBUG oslo_vmware.api [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961886, 'name': ReconfigVM_Task, 'duration_secs': 1.198129} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.209848] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Reconfigured VM instance instance-00000054 to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1051.210788] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c772a0e-67a5-4192-8199-b165269ea611 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.232966] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Reconfiguring VM instance instance-00000054 to attach disk [datastore2] 04149a5c-d1b5-4d71-a1ca-44696506a40d/04149a5c-d1b5-4d71-a1ca-44696506a40d.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1051.233961] env[68217]: DEBUG nova.network.neutron [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Updating instance_info_cache with network_info: [{"id": "018f74db-1dcd-49e4-bd11-2ab20c34e986", "address": "fa:16:3e:23:9b:2c", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap018f74db-1d", "ovs_interfaceid": "018f74db-1dcd-49e4-bd11-2ab20c34e986", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.235142] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdb68f25-2121-47b7-a66c-f4212e5943e2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.253821] env[68217]: DEBUG oslo_vmware.api [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1051.253821] env[68217]: value = "task-2961889" [ 1051.253821] env[68217]: _type = "Task" [ 1051.253821] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.261588] env[68217]: DEBUG oslo_vmware.api [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961889, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.386554] env[68217]: DEBUG oslo_vmware.api [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961888, 'name': PowerOffVM_Task, 'duration_secs': 0.182633} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.387588] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1051.387588] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1051.387588] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7a358bfd-657a-4f2a-8385-a5dffac2db16 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.418706] env[68217]: INFO nova.compute.manager [-] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Took 1.31 seconds to deallocate network for instance. 
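(Annotation, not part of the captured log.) The vCenter task entries around here all follow one life-cycle: a Task handle is returned, the API layer logs "Waiting for the task ... to complete", polls "progress is N%", and finally reports duration_secs on success. As a minimal illustrative sketch of that polling pattern only, with a hypothetical get_task_state callable standing in for the real task-info lookup (this is not the oslo.vmware code referenced in these paths):

# Illustrative sketch -- mirrors the "Waiting for the task / progress is N% /
# completed successfully, duration_secs" pattern in the surrounding entries.
# get_task_state is a hypothetical helper, not an oslo.vmware API.
import time

def wait_for_task(task_ref, get_task_state, poll_interval=0.5):
    """Poll a vCenter-style task until it succeeds; return its duration."""
    start = time.monotonic()
    while True:
        state, progress = get_task_state(task_ref)   # e.g. ("running", 14)
        if state == "success":
            return time.monotonic() - start          # logged as duration_secs
        if state == "error":
            raise RuntimeError(f"Task {task_ref} failed")
        print(f"Task: {task_ref} progress is {progress}%.")
        time.sleep(poll_interval)

(End of annotation; log continues below.)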
[ 1051.447040] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1051.447336] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1051.447514] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Deleting the datastore file [datastore1] 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1051.447809] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3b31de23-54e3-43c5-a757-71db402c7b03 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.454709] env[68217]: DEBUG oslo_vmware.api [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1051.454709] env[68217]: value = "task-2961891" [ 1051.454709] env[68217]: _type = "Task" [ 1051.454709] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.464288] env[68217]: DEBUG oslo_vmware.api [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961891, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.560409] env[68217]: DEBUG oslo_vmware.api [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961887, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.627623] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b37dfe20-bf76-41ef-8925-d044301169e7 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "6b4dff91-254e-43cc-85cf-7de6214dcafd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.305s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.748754] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "refresh_cache-a4dcc7fb-83e4-4bb9-9c98-9569daee1435" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1051.749097] env[68217]: DEBUG nova.compute.manager [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Instance network_info: |[{"id": "018f74db-1dcd-49e4-bd11-2ab20c34e986", "address": "fa:16:3e:23:9b:2c", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap018f74db-1d", "ovs_interfaceid": "018f74db-1dcd-49e4-bd11-2ab20c34e986", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1051.749572] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:9b:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '018f74db-1dcd-49e4-bd11-2ab20c34e986', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1051.758138] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1051.759121] env[68217]: DEBUG nova.network.neutron [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Updating instance_info_cache with network_info: [{"id": "a112e6da-1e76-4618-b45e-229cbb5c0ebd", "address": "fa:16:3e:43:19:4b", "network": {"id": "3d971357-3a3e-4e28-95f1-f817a2fa40ed", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1891045338-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "8fb62d18446841a3b2a6ac25ab5dc869", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "02bbcead-d833-4543-bec6-fb82dfe659ff", "external-id": "nsx-vlan-transportzone-478", "segmentation_id": 478, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa112e6da-1e", "ovs_interfaceid": "a112e6da-1e76-4618-b45e-229cbb5c0ebd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1051.763618] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1051.768149] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7af9f51e-2186-479f-838f-173a0a427f8b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.792656] env[68217]: DEBUG oslo_vmware.api [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961889, 'name': ReconfigVM_Task, 'duration_secs': 0.279148} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.793955] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Reconfigured VM instance instance-00000054 to attach disk [datastore2] 04149a5c-d1b5-4d71-a1ca-44696506a40d/04149a5c-d1b5-4d71-a1ca-44696506a40d.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1051.794275] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updating instance '04149a5c-d1b5-4d71-a1ca-44696506a40d' progress to 50 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1051.797428] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1051.797428] env[68217]: value = "task-2961892" [ 1051.797428] env[68217]: _type = "Task" [ 1051.797428] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.802948] env[68217]: DEBUG nova.compute.manager [req-b4fdec67-a735-4f19-a610-799457b4d013 req-0103ebd4-7243-45eb-9fc2-b09302ad10de service nova] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Received event network-vif-plugged-018f74db-1dcd-49e4-bd11-2ab20c34e986 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1051.803167] env[68217]: DEBUG oslo_concurrency.lockutils [req-b4fdec67-a735-4f19-a610-799457b4d013 req-0103ebd4-7243-45eb-9fc2-b09302ad10de service nova] Acquiring lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.803370] env[68217]: DEBUG oslo_concurrency.lockutils [req-b4fdec67-a735-4f19-a610-799457b4d013 req-0103ebd4-7243-45eb-9fc2-b09302ad10de service nova] Lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1051.803536] env[68217]: DEBUG oslo_concurrency.lockutils [req-b4fdec67-a735-4f19-a610-799457b4d013 req-0103ebd4-7243-45eb-9fc2-b09302ad10de service nova] Lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1051.803699] env[68217]: DEBUG nova.compute.manager [req-b4fdec67-a735-4f19-a610-799457b4d013 req-0103ebd4-7243-45eb-9fc2-b09302ad10de service nova] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] No waiting events found dispatching network-vif-plugged-018f74db-1dcd-49e4-bd11-2ab20c34e986 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1051.803859] env[68217]: WARNING nova.compute.manager [req-b4fdec67-a735-4f19-a610-799457b4d013 req-0103ebd4-7243-45eb-9fc2-b09302ad10de service nova] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Received unexpected event 
network-vif-plugged-018f74db-1dcd-49e4-bd11-2ab20c34e986 for instance with vm_state building and task_state spawning. [ 1051.804022] env[68217]: DEBUG nova.compute.manager [req-b4fdec67-a735-4f19-a610-799457b4d013 req-0103ebd4-7243-45eb-9fc2-b09302ad10de service nova] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Received event network-changed-018f74db-1dcd-49e4-bd11-2ab20c34e986 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1051.804180] env[68217]: DEBUG nova.compute.manager [req-b4fdec67-a735-4f19-a610-799457b4d013 req-0103ebd4-7243-45eb-9fc2-b09302ad10de service nova] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Refreshing instance network info cache due to event network-changed-018f74db-1dcd-49e4-bd11-2ab20c34e986. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1051.804359] env[68217]: DEBUG oslo_concurrency.lockutils [req-b4fdec67-a735-4f19-a610-799457b4d013 req-0103ebd4-7243-45eb-9fc2-b09302ad10de service nova] Acquiring lock "refresh_cache-a4dcc7fb-83e4-4bb9-9c98-9569daee1435" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.804491] env[68217]: DEBUG oslo_concurrency.lockutils [req-b4fdec67-a735-4f19-a610-799457b4d013 req-0103ebd4-7243-45eb-9fc2-b09302ad10de service nova] Acquired lock "refresh_cache-a4dcc7fb-83e4-4bb9-9c98-9569daee1435" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1051.804643] env[68217]: DEBUG nova.network.neutron [req-b4fdec67-a735-4f19-a610-799457b4d013 req-0103ebd4-7243-45eb-9fc2-b09302ad10de service nova] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Refreshing network info cache for port 018f74db-1dcd-49e4-bd11-2ab20c34e986 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1051.817222] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961892, 'name': CreateVM_Task} progress is 10%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.858437] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b20593a7-2c58-4743-9bb6-0b2899d9940a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.868184] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a84fec8-7a00-45f3-aee4-52f727aed734 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.898549] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561f5e1d-289c-4860-af49-4d295c1811af {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.906191] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092a9ddc-b27a-4f0b-98a7-deddb58daea8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.920059] env[68217]: DEBUG nova.compute.provider_tree [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1051.925934] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1051.968503] env[68217]: DEBUG oslo_vmware.api [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961891, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15766} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.968861] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1051.969134] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1051.969395] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1051.969740] env[68217]: INFO nova.compute.manager [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1051.969931] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1051.970273] env[68217]: DEBUG nova.compute.manager [-] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1051.970443] env[68217]: DEBUG nova.network.neutron [-] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1051.991833] env[68217]: DEBUG nova.compute.manager [req-7693416d-f9c3-4e4b-9821-64d04d8c568f req-3525d245-ec0a-4bc8-889c-de6411a96b47 service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Received event network-changed-753c250a-569f-42f8-a9e7-fed02079c841 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1051.992206] env[68217]: DEBUG nova.compute.manager [req-7693416d-f9c3-4e4b-9821-64d04d8c568f req-3525d245-ec0a-4bc8-889c-de6411a96b47 service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Refreshing instance network info cache due to event network-changed-753c250a-569f-42f8-a9e7-fed02079c841. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1051.992513] env[68217]: DEBUG oslo_concurrency.lockutils [req-7693416d-f9c3-4e4b-9821-64d04d8c568f req-3525d245-ec0a-4bc8-889c-de6411a96b47 service nova] Acquiring lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1051.992700] env[68217]: DEBUG oslo_concurrency.lockutils [req-7693416d-f9c3-4e4b-9821-64d04d8c568f req-3525d245-ec0a-4bc8-889c-de6411a96b47 service nova] Acquired lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1051.992881] env[68217]: DEBUG nova.network.neutron [req-7693416d-f9c3-4e4b-9821-64d04d8c568f req-3525d245-ec0a-4bc8-889c-de6411a96b47 service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Refreshing network info cache for port 753c250a-569f-42f8-a9e7-fed02079c841 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1052.061337] env[68217]: DEBUG oslo_vmware.api [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961887, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.265557] env[68217]: DEBUG oslo_concurrency.lockutils [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Releasing lock "refresh_cache-ffff4cf4-f663-4965-84d1-8351bfde1252" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1052.265917] env[68217]: DEBUG nova.objects.instance [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lazy-loading 'migration_context' on Instance uuid ffff4cf4-f663-4965-84d1-8351bfde1252 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.313619] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e20990cd-a3e1-4805-b609-c85ca4d71161 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.316421] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961892, 'name': CreateVM_Task, 'duration_secs': 0.384312} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.316587] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1052.317577] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.317737] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.318049] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1052.318302] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b21bff9-92cf-447b-8056-4b4d41622190 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.338759] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dacf647-6af1-4343-a7cb-eaaaa57fe860 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.339482] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1052.339482] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5202ca19-b778-90d5-0eef-868310d12b2b" [ 1052.339482] env[68217]: _type = "Task" [ 1052.339482] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.356008] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updating instance '04149a5c-d1b5-4d71-a1ca-44696506a40d' progress to 67 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1052.372197] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5202ca19-b778-90d5-0eef-868310d12b2b, 'name': SearchDatastore_Task, 'duration_secs': 0.009779} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.372476] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1052.372703] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1052.372947] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.373084] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1052.373262] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1052.373506] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-996ad2eb-2b90-4575-9553-1f1fa43271f6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.381306] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1052.381495] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1052.382186] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a3ca1393-3538-4e8d-8980-4cea37d26857 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.387607] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1052.387607] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b1f5d1-bdaf-0f9b-6535-2c5f29804057" [ 1052.387607] env[68217]: _type = "Task" [ 1052.387607] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.395140] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b1f5d1-bdaf-0f9b-6535-2c5f29804057, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.423764] env[68217]: DEBUG nova.scheduler.client.report [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1052.564543] env[68217]: DEBUG oslo_vmware.api [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961887, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.697031] env[68217]: DEBUG nova.network.neutron [req-b4fdec67-a735-4f19-a610-799457b4d013 req-0103ebd4-7243-45eb-9fc2-b09302ad10de service nova] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Updated VIF entry in instance network info cache for port 018f74db-1dcd-49e4-bd11-2ab20c34e986. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1052.697441] env[68217]: DEBUG nova.network.neutron [req-b4fdec67-a735-4f19-a610-799457b4d013 req-0103ebd4-7243-45eb-9fc2-b09302ad10de service nova] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Updating instance_info_cache with network_info: [{"id": "018f74db-1dcd-49e4-bd11-2ab20c34e986", "address": "fa:16:3e:23:9b:2c", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap018f74db-1d", "ovs_interfaceid": "018f74db-1dcd-49e4-bd11-2ab20c34e986", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.773980] env[68217]: DEBUG nova.objects.base [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1052.774984] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93cf8168-2fb6-48d4-8271-f5d300c77bc0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.798751] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f592c0c5-ffa1-4d57-b28e-953d7342bcb5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.807153] env[68217]: DEBUG oslo_vmware.api [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 1052.807153] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f2dca3-a44e-3166-94f2-95ffb7525f53" [ 1052.807153] env[68217]: _type = "Task" [ 1052.807153] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.812352] env[68217]: DEBUG oslo_vmware.api [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f2dca3-a44e-3166-94f2-95ffb7525f53, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.837472] env[68217]: DEBUG nova.network.neutron [req-7693416d-f9c3-4e4b-9821-64d04d8c568f req-3525d245-ec0a-4bc8-889c-de6411a96b47 service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Updated VIF entry in instance network info cache for port 753c250a-569f-42f8-a9e7-fed02079c841. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1052.837812] env[68217]: DEBUG nova.network.neutron [req-7693416d-f9c3-4e4b-9821-64d04d8c568f req-3525d245-ec0a-4bc8-889c-de6411a96b47 service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Updating instance_info_cache with network_info: [{"id": "753c250a-569f-42f8-a9e7-fed02079c841", "address": "fa:16:3e:ee:87:7a", "network": {"id": "8ad06dc6-c950-487f-b4ea-fe9364f8548d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-897727038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1424003d74424a9e84d15879f2e634e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap753c250a-56", "ovs_interfaceid": "753c250a-569f-42f8-a9e7-fed02079c841", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1052.897343] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b1f5d1-bdaf-0f9b-6535-2c5f29804057, 'name': SearchDatastore_Task, 'duration_secs': 0.008753} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.897843] env[68217]: DEBUG nova.network.neutron [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Port 772726db-4382-4051-9a7d-abfc670d5c9b binding to destination host cpu-1 is already ACTIVE {{(pid=68217) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1052.899564] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5860e815-134b-4faa-af0d-f75f5e11ee54 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.906520] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1052.906520] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c383e9-a25e-fcc8-2bcf-9fb45d236fd7" [ 1052.906520] env[68217]: _type = "Task" [ 1052.906520] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.914660] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c383e9-a25e-fcc8-2bcf-9fb45d236fd7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.929180] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.833s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1052.931265] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.005s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1052.931508] env[68217]: DEBUG nova.objects.instance [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lazy-loading 'resources' on Instance uuid 08ba7156-1c6d-4385-939c-bdd575c7fda3 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1052.951983] env[68217]: INFO nova.scheduler.client.report [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Deleted allocations for instance d14026b1-84dd-430e-be94-94dcb1f47473 [ 1052.970789] env[68217]: DEBUG nova.network.neutron [-] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.065130] env[68217]: DEBUG 
oslo_vmware.api [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961887, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.203166] env[68217]: DEBUG oslo_concurrency.lockutils [req-b4fdec67-a735-4f19-a610-799457b4d013 req-0103ebd4-7243-45eb-9fc2-b09302ad10de service nova] Releasing lock "refresh_cache-a4dcc7fb-83e4-4bb9-9c98-9569daee1435" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.203460] env[68217]: DEBUG nova.compute.manager [req-b4fdec67-a735-4f19-a610-799457b4d013 req-0103ebd4-7243-45eb-9fc2-b09302ad10de service nova] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Received event network-vif-deleted-a5a64da9-f003-45ab-9b90-62488204fc29 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1053.315381] env[68217]: DEBUG oslo_vmware.api [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f2dca3-a44e-3166-94f2-95ffb7525f53, 'name': SearchDatastore_Task, 'duration_secs': 0.007206} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.315689] env[68217]: DEBUG oslo_concurrency.lockutils [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.340469] env[68217]: DEBUG oslo_concurrency.lockutils [req-7693416d-f9c3-4e4b-9821-64d04d8c568f req-3525d245-ec0a-4bc8-889c-de6411a96b47 service nova] Releasing lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.417093] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c383e9-a25e-fcc8-2bcf-9fb45d236fd7, 'name': SearchDatastore_Task, 'duration_secs': 0.012911} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.417093] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1053.417093] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] a4dcc7fb-83e4-4bb9-9c98-9569daee1435/a4dcc7fb-83e4-4bb9-9c98-9569daee1435.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1053.417316] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-440735b9-c783-4022-8ab5-844098289848 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.424016] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1053.424016] env[68217]: value = "task-2961893" [ 1053.424016] env[68217]: _type = "Task" [ 1053.424016] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.431701] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961893, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.459595] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66933995-3ba2-4fba-9843-dea1d78f7ab8 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "d14026b1-84dd-430e-be94-94dcb1f47473" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.573s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.472950] env[68217]: INFO nova.compute.manager [-] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Took 1.50 seconds to deallocate network for instance. [ 1053.567315] env[68217]: DEBUG oslo_vmware.api [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961887, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.676930] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13546fe4-dba4-4f06-aea8-6622e22816d9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.685062] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d09b8859-1025-451b-9f36-a51ddc4895fa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.719803] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a904af1-772a-4e97-9964-2a6029081bfc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.730549] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc6a8d3-2283-4d13-b556-53b657e34b4b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.746651] env[68217]: DEBUG nova.compute.provider_tree [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1053.926169] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "04149a5c-d1b5-4d71-a1ca-44696506a40d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1053.927038] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "04149a5c-d1b5-4d71-a1ca-44696506a40d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1053.927038] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "04149a5c-d1b5-4d71-a1ca-44696506a40d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1053.935146] env[68217]: DEBUG nova.compute.manager [req-83c6ee7d-c490-4ece-b3a4-5200e635a5b9 req-c42aecb7-74f9-44df-8c33-f26a1bcafcc4 service nova] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Received event network-vif-deleted-86568bc3-8f1e-4880-9a22-48003fc7babd {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1053.939317] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961893, 'name': 
CopyVirtualDisk_Task, 'duration_secs': 0.471237} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.939317] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] a4dcc7fb-83e4-4bb9-9c98-9569daee1435/a4dcc7fb-83e4-4bb9-9c98-9569daee1435.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1053.940027] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1053.940498] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-53d9b138-0627-42dc-adff-1cc7e5b27e08 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.947016] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1053.947016] env[68217]: value = "task-2961894" [ 1053.947016] env[68217]: _type = "Task" [ 1053.947016] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.957488] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961894, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.982305] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1054.066905] env[68217]: DEBUG oslo_vmware.api [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961887, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.250254] env[68217]: DEBUG nova.scheduler.client.report [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1054.457319] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961894, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068299} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.457587] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1054.458697] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb8b39e-9745-4d53-8c6c-c43fb3eeab2c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.481981] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] a4dcc7fb-83e4-4bb9-9c98-9569daee1435/a4dcc7fb-83e4-4bb9-9c98-9569daee1435.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1054.482610] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-64469f37-9a40-4e10-b529-59e06eaa0674 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.502726] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1054.502726] env[68217]: value = "task-2961895" [ 1054.502726] env[68217]: _type = "Task" [ 1054.502726] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1054.510703] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961895, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.566396] env[68217]: DEBUG oslo_vmware.api [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961887, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.761551] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.830s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1054.764445] env[68217]: DEBUG oslo_concurrency.lockutils [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.449s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1054.788015] env[68217]: INFO nova.scheduler.client.report [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleted allocations for instance 08ba7156-1c6d-4385-939c-bdd575c7fda3 [ 1054.971861] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.972059] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1054.972244] env[68217]: DEBUG nova.network.neutron [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1055.013386] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961895, 'name': ReconfigVM_Task, 'duration_secs': 0.270931} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.013707] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Reconfigured VM instance instance-00000068 to attach disk [datastore1] a4dcc7fb-83e4-4bb9-9c98-9569daee1435/a4dcc7fb-83e4-4bb9-9c98-9569daee1435.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1055.014889] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d238257e-1518-4f7f-80ba-2c6a8d25d95f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.021780] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1055.021780] env[68217]: value = "task-2961896" [ 1055.021780] env[68217]: _type = "Task" [ 1055.021780] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.029671] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961896, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.067899] env[68217]: DEBUG oslo_vmware.api [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961887, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.294571] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4f408ef0-13b1-428e-833f-e16a95fd1cda tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "08ba7156-1c6d-4385-939c-bdd575c7fda3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.840s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1055.454418] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "815d1801-fa07-4466-850d-b1a36d630d46" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.454651] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "815d1801-fa07-4466-850d-b1a36d630d46" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.492161] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e77aec-b8b9-4676-8d73-d4434b18e9a5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.500103] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06277eeb-2cb0-4917-a39f-b110cb9d4f08 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.537464] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38d51bf0-da97-4081-8e40-f77a9fa1d713 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.545156] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961896, 'name': Rename_Task, 'duration_secs': 0.133977} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1055.547211] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1055.547484] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f9b4f7a5-6714-46c1-b821-aba0950a1e98 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.549720] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b3b865-645f-4aee-bfa3-07756616798a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.563642] env[68217]: DEBUG nova.compute.provider_tree [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1055.568884] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1055.568884] env[68217]: value = "task-2961897" [ 1055.568884] env[68217]: _type = "Task" [ 1055.568884] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1055.578091] env[68217]: DEBUG oslo_vmware.api [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961887, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.583095] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961897, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1055.717275] env[68217]: DEBUG nova.network.neutron [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updating instance_info_cache with network_info: [{"id": "772726db-4382-4051-9a7d-abfc670d5c9b", "address": "fa:16:3e:49:ce:f8", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap772726db-43", "ovs_interfaceid": "772726db-4382-4051-9a7d-abfc670d5c9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.909342] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "e3dfe047-7cdc-4a1d-8af3-6437b5555ac4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1055.909574] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "e3dfe047-7cdc-4a1d-8af3-6437b5555ac4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1055.957505] env[68217]: DEBUG nova.compute.manager [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1056.068930] env[68217]: DEBUG nova.scheduler.client.report [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1056.079065] env[68217]: DEBUG oslo_vmware.api [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961887, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.084009] env[68217]: DEBUG oslo_vmware.api [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961897, 'name': PowerOnVM_Task, 'duration_secs': 0.43895} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.084904] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1056.085135] env[68217]: INFO nova.compute.manager [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Took 6.62 seconds to spawn the instance on the hypervisor. [ 1056.085322] env[68217]: DEBUG nova.compute.manager [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1056.086105] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4eac28-da6a-4d22-bf00-4760aafd9f20 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.220625] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1056.412274] env[68217]: DEBUG nova.compute.manager [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1056.480896] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1056.578960] env[68217]: DEBUG oslo_vmware.api [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961887, 'name': ReconfigVM_Task, 'duration_secs': 5.911355} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.579098] env[68217]: DEBUG oslo_concurrency.lockutils [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1056.579202] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Reconfigured VM to detach interface {{(pid=68217) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1056.602349] env[68217]: INFO nova.compute.manager [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Took 11.52 seconds to build instance. 
[ 1056.743988] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4b6a46-a3a8-4d7b-8d4c-9c3018cf01a0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.764852] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-589af31e-4f9c-4bee-aa01-96d18d34fcb4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.771751] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updating instance '04149a5c-d1b5-4d71-a1ca-44696506a40d' progress to 83 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1056.835374] env[68217]: DEBUG nova.compute.manager [req-a5b1eb02-e170-43f6-90ee-a5fd08e9ba99 req-4a9d4a9c-184d-4299-9716-f314d64c759f service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Received event network-vif-deleted-bd03a7d5-905c-4c0d-9972-f46ba5682fd7 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1056.835592] env[68217]: INFO nova.compute.manager [req-a5b1eb02-e170-43f6-90ee-a5fd08e9ba99 req-4a9d4a9c-184d-4299-9716-f314d64c759f service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Neutron deleted interface bd03a7d5-905c-4c0d-9972-f46ba5682fd7; detaching it from the instance and deleting it from the info cache [ 1056.835849] env[68217]: DEBUG nova.network.neutron [req-a5b1eb02-e170-43f6-90ee-a5fd08e9ba99 req-4a9d4a9c-184d-4299-9716-f314d64c759f service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Updating instance_info_cache with network_info: [{"id": "05c67562-5b0b-421a-a707-1d10d90f4a71", "address": "fa:16:3e:f2:62:b9", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05c67562-5b", "ovs_interfaceid": "05c67562-5b0b-421a-a707-1d10d90f4a71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "ff379aaa-198c-405e-ae1f-591e6f4cc070", "address": "fa:16:3e:61:77:85", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapff379aaa-19", "ovs_interfaceid": "ff379aaa-198c-405e-ae1f-591e6f4cc070", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.935652] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1057.080564] env[68217]: DEBUG oslo_concurrency.lockutils [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.316s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.083417] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.101s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1057.083645] env[68217]: DEBUG nova.objects.instance [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lazy-loading 'resources' on Instance uuid 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1057.103856] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6bfadad3-9ccb-4480-8cc8-461c129699c4 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.037s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1057.278849] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0cc8d6bc-4b7d-4171-aead-bd40e27ac228 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updating instance '04149a5c-d1b5-4d71-a1ca-44696506a40d' progress to 100 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1057.338607] env[68217]: DEBUG oslo_concurrency.lockutils [req-a5b1eb02-e170-43f6-90ee-a5fd08e9ba99 
req-4a9d4a9c-184d-4299-9716-f314d64c759f service nova] Acquiring lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.338607] env[68217]: DEBUG oslo_concurrency.lockutils [req-a5b1eb02-e170-43f6-90ee-a5fd08e9ba99 req-4a9d4a9c-184d-4299-9716-f314d64c759f service nova] Acquired lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1057.340079] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94674b6c-91fa-4369-a250-9f2ecaa2e8a6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.357650] env[68217]: DEBUG oslo_concurrency.lockutils [req-a5b1eb02-e170-43f6-90ee-a5fd08e9ba99 req-4a9d4a9c-184d-4299-9716-f314d64c759f service nova] Releasing lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1057.357921] env[68217]: WARNING nova.compute.manager [req-a5b1eb02-e170-43f6-90ee-a5fd08e9ba99 req-4a9d4a9c-184d-4299-9716-f314d64c759f service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Detach interface failed, port_id=bd03a7d5-905c-4c0d-9972-f46ba5682fd7, reason: No device with interface-id bd03a7d5-905c-4c0d-9972-f46ba5682fd7 exists on VM: nova.exception.NotFound: No device with interface-id bd03a7d5-905c-4c0d-9972-f46ba5682fd7 exists on VM [ 1057.670893] env[68217]: INFO nova.scheduler.client.report [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleted allocation for migration 70d4a642-d476-4225-89a6-2b2183c2aa27 [ 1057.818703] env[68217]: DEBUG oslo_concurrency.lockutils [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.819053] env[68217]: DEBUG oslo_concurrency.lockutils [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1057.819252] env[68217]: DEBUG nova.network.neutron [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1057.854838] env[68217]: DEBUG nova.compute.manager [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Received event network-vif-deleted-ff379aaa-198c-405e-ae1f-591e6f4cc070 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1057.855067] env[68217]: INFO nova.compute.manager [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] [instance: 
35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Neutron deleted interface ff379aaa-198c-405e-ae1f-591e6f4cc070; detaching it from the instance and deleting it from the info cache [ 1057.855339] env[68217]: DEBUG nova.network.neutron [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Updating instance_info_cache with network_info: [{"id": "05c67562-5b0b-421a-a707-1d10d90f4a71", "address": "fa:16:3e:f2:62:b9", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05c67562-5b", "ovs_interfaceid": "05c67562-5b0b-421a-a707-1d10d90f4a71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.894691] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e45a478-7fc7-4485-9720-5d5c9e9d1d39 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.903232] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf972ada-3815-4ade-a981-64e4b01b5a14 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.939514] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d99a82a-7155-4577-9ed2-297c45dfad2e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.948172] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e19a7277-b008-405a-ac02-87e05838243a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.962628] env[68217]: DEBUG nova.compute.provider_tree [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1058.179015] env[68217]: DEBUG oslo_concurrency.lockutils [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "ffff4cf4-f663-4965-84d1-8351bfde1252" "released" by 
"nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 8.249s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.358723] env[68217]: DEBUG oslo_concurrency.lockutils [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] Acquiring lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.358903] env[68217]: DEBUG oslo_concurrency.lockutils [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] Acquired lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.359790] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7ef1c32-199a-4d02-8269-4e9703d5db9c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.378919] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54620781-de0b-4e6b-8a8e-3f94da901924 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.405281] env[68217]: DEBUG nova.virt.vmwareapi.vmops [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Reconfiguring VM to detach interface {{(pid=68217) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1058.407823] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efef0a4e-3d08-42ee-a36b-37c6c9f3cb60 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.428256] env[68217]: DEBUG oslo_vmware.api [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] Waiting for the task: (returnval){ [ 1058.428256] env[68217]: value = "task-2961898" [ 1058.428256] env[68217]: _type = "Task" [ 1058.428256] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.436211] env[68217]: DEBUG oslo_vmware.api [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] Task: {'id': task-2961898, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.442682] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1058.465170] env[68217]: DEBUG nova.scheduler.client.report [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1058.570499] env[68217]: INFO nova.network.neutron [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Port ff379aaa-198c-405e-ae1f-591e6f4cc070 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1058.570985] env[68217]: DEBUG nova.network.neutron [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Updating instance_info_cache with network_info: [{"id": "05c67562-5b0b-421a-a707-1d10d90f4a71", "address": "fa:16:3e:f2:62:b9", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.193", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap05c67562-5b", "ovs_interfaceid": "05c67562-5b0b-421a-a707-1d10d90f4a71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.896937] env[68217]: DEBUG nova.compute.manager [req-e38b2529-0a69-4a5a-89a8-8d5ee7247e59 req-8008412f-99fb-4bb0-a10f-3dd7b86481e9 service nova] [instance: 
a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Received event network-changed-018f74db-1dcd-49e4-bd11-2ab20c34e986 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1058.896937] env[68217]: DEBUG nova.compute.manager [req-e38b2529-0a69-4a5a-89a8-8d5ee7247e59 req-8008412f-99fb-4bb0-a10f-3dd7b86481e9 service nova] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Refreshing instance network info cache due to event network-changed-018f74db-1dcd-49e4-bd11-2ab20c34e986. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1058.897104] env[68217]: DEBUG oslo_concurrency.lockutils [req-e38b2529-0a69-4a5a-89a8-8d5ee7247e59 req-8008412f-99fb-4bb0-a10f-3dd7b86481e9 service nova] Acquiring lock "refresh_cache-a4dcc7fb-83e4-4bb9-9c98-9569daee1435" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1058.897258] env[68217]: DEBUG oslo_concurrency.lockutils [req-e38b2529-0a69-4a5a-89a8-8d5ee7247e59 req-8008412f-99fb-4bb0-a10f-3dd7b86481e9 service nova] Acquired lock "refresh_cache-a4dcc7fb-83e4-4bb9-9c98-9569daee1435" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1058.897419] env[68217]: DEBUG nova.network.neutron [req-e38b2529-0a69-4a5a-89a8-8d5ee7247e59 req-8008412f-99fb-4bb0-a10f-3dd7b86481e9 service nova] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Refreshing network info cache for port 018f74db-1dcd-49e4-bd11-2ab20c34e986 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1058.942817] env[68217]: DEBUG oslo_vmware.api [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] Task: {'id': task-2961898, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.969639] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.886s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1058.972444] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.492s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1058.973961] env[68217]: INFO nova.compute.claims [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1058.997580] env[68217]: INFO nova.scheduler.client.report [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Deleted allocations for instance 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f [ 1059.073830] env[68217]: DEBUG oslo_concurrency.lockutils [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "refresh_cache-35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1059.441992] env[68217]: DEBUG oslo_vmware.api [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] Task: {'id': task-2961898, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.443535] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "04149a5c-d1b5-4d71-a1ca-44696506a40d" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.443754] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "04149a5c-d1b5-4d71-a1ca-44696506a40d" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.443932] env[68217]: DEBUG nova.compute.manager [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Going to confirm migration 6 {{(pid=68217) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1059.458931] env[68217]: DEBUG oslo_concurrency.lockutils [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "ffff4cf4-f663-4965-84d1-8351bfde1252" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.459161] env[68217]: DEBUG oslo_concurrency.lockutils [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "ffff4cf4-f663-4965-84d1-8351bfde1252" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.459385] env[68217]: DEBUG oslo_concurrency.lockutils [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "ffff4cf4-f663-4965-84d1-8351bfde1252-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1059.459579] env[68217]: DEBUG oslo_concurrency.lockutils [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "ffff4cf4-f663-4965-84d1-8351bfde1252-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1059.459751] env[68217]: DEBUG oslo_concurrency.lockutils [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "ffff4cf4-f663-4965-84d1-8351bfde1252-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.462273] env[68217]: INFO nova.compute.manager [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Terminating instance [ 1059.504799] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f7eab6cd-5299-419c-95de-d163d4d846bf tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.150s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.578258] env[68217]: DEBUG oslo_concurrency.lockutils [None req-47031ce8-f373-46a3-88a9-f1d05114a2fb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-35c4ab95-fc14-4bd4-a2a5-64f15f070b88-bd03a7d5-905c-4c0d-9972-f46ba5682fd7" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.599s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1059.602417] env[68217]: DEBUG nova.network.neutron [req-e38b2529-0a69-4a5a-89a8-8d5ee7247e59 req-8008412f-99fb-4bb0-a10f-3dd7b86481e9 service nova] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Updated VIF entry in instance network info cache for port 018f74db-1dcd-49e4-bd11-2ab20c34e986. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1059.602765] env[68217]: DEBUG nova.network.neutron [req-e38b2529-0a69-4a5a-89a8-8d5ee7247e59 req-8008412f-99fb-4bb0-a10f-3dd7b86481e9 service nova] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Updating instance_info_cache with network_info: [{"id": "018f74db-1dcd-49e4-bd11-2ab20c34e986", "address": "fa:16:3e:23:9b:2c", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap018f74db-1d", "ovs_interfaceid": "018f74db-1dcd-49e4-bd11-2ab20c34e986", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1059.941477] env[68217]: DEBUG oslo_vmware.api [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] Task: {'id': task-2961898, 'name': ReconfigVM_Task} progress is 
14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.966109] env[68217]: DEBUG nova.compute.manager [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1059.966328] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1059.967305] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06c02539-c324-4f11-aca8-3383748102f8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.974535] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1059.974776] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2b535a50-f10e-4ba3-8aeb-2f8959ed45e6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.978671] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1059.978843] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1059.979032] env[68217]: DEBUG nova.network.neutron [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1059.979205] env[68217]: DEBUG nova.objects.instance [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lazy-loading 'info_cache' on Instance uuid 04149a5c-d1b5-4d71-a1ca-44696506a40d {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1059.984962] env[68217]: DEBUG oslo_vmware.api [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 1059.984962] env[68217]: value = "task-2961899" [ 1059.984962] env[68217]: _type = "Task" [ 
1059.984962] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.993630] env[68217]: DEBUG oslo_vmware.api [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961899, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.105648] env[68217]: DEBUG oslo_concurrency.lockutils [req-e38b2529-0a69-4a5a-89a8-8d5ee7247e59 req-8008412f-99fb-4bb0-a10f-3dd7b86481e9 service nova] Releasing lock "refresh_cache-a4dcc7fb-83e4-4bb9-9c98-9569daee1435" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1060.196040] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-584286c1-a8c9-4fcd-93fd-3b1ad70cbcf0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.203958] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f35bbce5-c604-47b3-a356-df0e3ba2733f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.236161] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec035c15-8332-496e-a598-37b6cb6e06b4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.243211] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-022caf8b-50a2-4d78-8948-96665fb6e204 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.258313] env[68217]: DEBUG nova.compute.provider_tree [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1060.441566] env[68217]: DEBUG oslo_vmware.api [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] Task: {'id': task-2961898, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.494944] env[68217]: DEBUG oslo_vmware.api [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961899, 'name': PowerOffVM_Task, 'duration_secs': 0.230415} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.495378] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1060.495378] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1060.495615] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f5bf5cd7-d35d-4c93-a1aa-cdf3a429d160 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.558128] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1060.558373] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1060.558598] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleting the datastore file [datastore1] ffff4cf4-f663-4965-84d1-8351bfde1252 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1060.558804] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eac9dfa1-5189-4b46-8293-9fc61762b930 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.565682] env[68217]: DEBUG oslo_vmware.api [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for the task: (returnval){ [ 1060.565682] env[68217]: value = "task-2961901" [ 1060.565682] env[68217]: _type = "Task" [ 1060.565682] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.574013] env[68217]: DEBUG oslo_vmware.api [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961901, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.761829] env[68217]: DEBUG nova.scheduler.client.report [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1060.942100] env[68217]: DEBUG oslo_vmware.api [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] Task: {'id': task-2961898, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.075366] env[68217]: DEBUG oslo_vmware.api [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Task: {'id': task-2961901, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.128549} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.075617] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1061.075821] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1061.076014] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1061.076194] env[68217]: INFO nova.compute.manager [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1061.076430] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1061.076617] env[68217]: DEBUG nova.compute.manager [-] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1061.076712] env[68217]: DEBUG nova.network.neutron [-] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1061.266415] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.294s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1061.266955] env[68217]: DEBUG nova.compute.manager [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1061.269993] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.334s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1061.271398] env[68217]: INFO nova.compute.claims [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1061.443494] env[68217]: DEBUG oslo_vmware.api [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] Task: {'id': task-2961898, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.496657] env[68217]: DEBUG nova.network.neutron [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updating instance_info_cache with network_info: [{"id": "772726db-4382-4051-9a7d-abfc670d5c9b", "address": "fa:16:3e:49:ce:f8", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap772726db-43", "ovs_interfaceid": "772726db-4382-4051-9a7d-abfc670d5c9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.617554] env[68217]: DEBUG nova.compute.manager [req-029b9451-9442-4e98-a9f4-7b00b540ed84 req-c70dafca-fc95-4494-8995-722cfee9b694 service nova] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Received event network-vif-deleted-a112e6da-1e76-4618-b45e-229cbb5c0ebd {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1061.617762] env[68217]: INFO nova.compute.manager [req-029b9451-9442-4e98-a9f4-7b00b540ed84 req-c70dafca-fc95-4494-8995-722cfee9b694 service nova] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Neutron deleted interface a112e6da-1e76-4618-b45e-229cbb5c0ebd; detaching it from the instance and deleting it from the info cache [ 1061.617936] env[68217]: DEBUG nova.network.neutron [req-029b9451-9442-4e98-a9f4-7b00b540ed84 req-c70dafca-fc95-4494-8995-722cfee9b694 service nova] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.777347] env[68217]: DEBUG nova.compute.utils [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1061.782780] env[68217]: DEBUG nova.compute.manager [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1061.782780] env[68217]: DEBUG nova.network.neutron [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1061.844234] env[68217]: DEBUG nova.policy [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '959602cf63674fb1a4edccb4e452e614', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c0522eaa6ebc48a28651f6b3bf1434f3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1061.944291] env[68217]: DEBUG oslo_vmware.api [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] Task: {'id': task-2961898, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.999053] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1061.999368] env[68217]: DEBUG nova.objects.instance [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lazy-loading 'migration_context' on Instance uuid 04149a5c-d1b5-4d71-a1ca-44696506a40d {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1062.098198] env[68217]: DEBUG nova.network.neutron [-] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1062.120123] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e05a1aef-8686-46a4-bacd-532da8626ba0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.133496] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a09f13a7-ad46-49ba-9d5b-757a7ee31e78 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.168704] env[68217]: DEBUG nova.compute.manager [req-029b9451-9442-4e98-a9f4-7b00b540ed84 req-c70dafca-fc95-4494-8995-722cfee9b694 service nova] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Detach interface failed, port_id=a112e6da-1e76-4618-b45e-229cbb5c0ebd, reason: Instance ffff4cf4-f663-4965-84d1-8351bfde1252 could not be found. 
{{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1062.229353] env[68217]: DEBUG nova.network.neutron [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Successfully created port: c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1062.283712] env[68217]: DEBUG nova.compute.manager [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1062.446135] env[68217]: DEBUG oslo_vmware.api [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] Task: {'id': task-2961898, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.489604] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8923b02c-9a25-40b7-b90d-012b0554848c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.496824] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45cd3d00-0a7b-497b-8e50-fb24e3359ffe {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.501898] env[68217]: DEBUG nova.objects.base [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Object Instance<04149a5c-d1b5-4d71-a1ca-44696506a40d> lazy-loaded attributes: info_cache,migration_context {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1062.503054] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a469ae-65db-4129-b64f-fc9cfb35d3fc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.530700] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c5ecd1d-450b-41ea-afbc-45f78893b235 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.546701] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f6c3c4d5-e850-4286-b783-a19e161fcc27 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.551893] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59aecb9d-4bcc-4d93-a91b-c9a33e466a55 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.556643] env[68217]: DEBUG oslo_vmware.api [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1062.556643] env[68217]: value = 
"session[524e8b0f-828c-0303-9685-d9311f1dba7a]521d6ca5-e21d-0ba1-5634-12fdf35082b2" [ 1062.556643] env[68217]: _type = "Task" [ 1062.556643] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1062.567095] env[68217]: DEBUG nova.compute.provider_tree [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1062.573186] env[68217]: DEBUG oslo_vmware.api [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521d6ca5-e21d-0ba1-5634-12fdf35082b2, 'name': SearchDatastore_Task, 'duration_secs': 0.017404} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.573442] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1062.600562] env[68217]: INFO nova.compute.manager [-] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Took 1.52 seconds to deallocate network for instance. [ 1062.723441] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1062.723749] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1062.944831] env[68217]: DEBUG oslo_vmware.api [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] Task: {'id': task-2961898, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.070047] env[68217]: DEBUG nova.scheduler.client.report [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1063.106598] env[68217]: DEBUG oslo_concurrency.lockutils [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.225941] env[68217]: DEBUG nova.compute.manager [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1063.296177] env[68217]: DEBUG nova.compute.manager [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1063.320562] env[68217]: DEBUG nova.virt.hardware [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1063.320820] env[68217]: DEBUG nova.virt.hardware [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1063.320918] env[68217]: DEBUG nova.virt.hardware [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1063.321115] env[68217]: DEBUG nova.virt.hardware [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1063.321266] env[68217]: DEBUG nova.virt.hardware [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1063.321411] env[68217]: DEBUG nova.virt.hardware [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1063.321611] env[68217]: DEBUG nova.virt.hardware [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1063.321767] env[68217]: DEBUG nova.virt.hardware [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1063.321927] env[68217]: DEBUG nova.virt.hardware [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1063.322120] env[68217]: DEBUG nova.virt.hardware [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1063.322279] env[68217]: DEBUG nova.virt.hardware [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1063.323131] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8665234c-1394-46b6-9243-5be6b22755f0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.331392] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce03afa6-a5f2-41f7-bb7d-a89ff0de32ad {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.445204] env[68217]: DEBUG oslo_vmware.api [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] Task: {'id': task-2961898, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1063.574930] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.305s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.575513] env[68217]: DEBUG nova.compute.manager [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1063.578220] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 1.005s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.676438] env[68217]: DEBUG nova.compute.manager [req-5b901568-bc3c-41b8-8696-32074730146b req-c1dc1c76-dbfc-4cde-8f1e-0f5457d5ece3 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Received event network-vif-plugged-c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1063.677071] env[68217]: DEBUG oslo_concurrency.lockutils [req-5b901568-bc3c-41b8-8696-32074730146b req-c1dc1c76-dbfc-4cde-8f1e-0f5457d5ece3 service nova] Acquiring lock "815d1801-fa07-4466-850d-b1a36d630d46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.677321] env[68217]: DEBUG oslo_concurrency.lockutils [req-5b901568-bc3c-41b8-8696-32074730146b req-c1dc1c76-dbfc-4cde-8f1e-0f5457d5ece3 service nova] Lock "815d1801-fa07-4466-850d-b1a36d630d46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1063.677483] env[68217]: DEBUG oslo_concurrency.lockutils [req-5b901568-bc3c-41b8-8696-32074730146b req-c1dc1c76-dbfc-4cde-8f1e-0f5457d5ece3 service nova] Lock "815d1801-fa07-4466-850d-b1a36d630d46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1063.677652] env[68217]: DEBUG nova.compute.manager [req-5b901568-bc3c-41b8-8696-32074730146b req-c1dc1c76-dbfc-4cde-8f1e-0f5457d5ece3 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] No waiting events found dispatching network-vif-plugged-c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1063.677826] env[68217]: WARNING nova.compute.manager [req-5b901568-bc3c-41b8-8696-32074730146b req-c1dc1c76-dbfc-4cde-8f1e-0f5457d5ece3 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Received unexpected event network-vif-plugged-c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 for instance with vm_state building and task_state spawning. 
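The entries above and below repeatedly show the same pattern: an oslo.vmware task handle (PowerOffVM_Task, DeleteDatastoreFile_Task, ReconfigVM_Task, SearchDatastore_Task, CreateVM_Task) is returned immediately by the vCenter API and then polled until it reports completion, producing the "Waiting for the task: (returnval){ ... }" and "progress is N%" lines. The sketch below is only an illustration of that polling loop under stated assumptions; the helper poll_task_state and the TaskTimedOut exception are hypothetical names, not the actual oslo.vmware API.

import time

class TaskTimedOut(Exception):
    """Illustrative error raised when a task never reaches a terminal state."""

def wait_for_vmware_task(poll_task_state, task_id, interval=0.5, timeout=300):
    # poll_task_state(task_id) is an assumed callable returning a
    # (state, progress, error) tuple, e.g. ('running', 14, None).
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        state, progress, error = poll_task_state(task_id)
        if state == 'success':
            return
        if state == 'error':
            raise RuntimeError(f"task {task_id} failed: {error}")
        # Mirrors the repeated "Task: {...} progress is N%." DEBUG lines in the log.
        print(f"Task {task_id} progress is {progress}%.")
        time.sleep(interval)
    raise TaskTimedOut(f"task {task_id} did not complete within {timeout}s")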
[ 1063.744971] env[68217]: DEBUG nova.network.neutron [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Successfully updated port: c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1063.749777] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1063.946416] env[68217]: DEBUG oslo_vmware.api [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] Task: {'id': task-2961898, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.081698] env[68217]: DEBUG nova.compute.utils [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1064.085715] env[68217]: DEBUG nova.compute.manager [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1064.085881] env[68217]: DEBUG nova.network.neutron [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1064.122947] env[68217]: DEBUG nova.policy [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0fd08981ea724019826d597a1c8b4ecd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '0d6233e9874c41329f81c990f8bc72b1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1064.247748] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1064.247930] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquired lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1064.248023] env[68217]: DEBUG nova.network.neutron [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1064.376367] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fe7cf36-e5c3-4251-bd9a-25ace6260aec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.384303] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d188309c-9258-4d9b-9530-e4e69ad979bc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.416261] env[68217]: DEBUG nova.network.neutron [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Successfully created port: 1916eda4-ae94-4d60-800d-2f94dc473c1e {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1064.418591] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c65b6b6-56f5-4b6b-96b9-c8397103151d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.426714] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153cc8a5-0a6f-4f09-8972-84dca496c5b0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.440458] env[68217]: DEBUG nova.compute.provider_tree [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1064.449229] env[68217]: DEBUG oslo_vmware.api [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] Task: {'id': task-2961898, 'name': ReconfigVM_Task, 'duration_secs': 5.75488} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.449639] env[68217]: DEBUG oslo_concurrency.lockutils [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] Releasing lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1064.449639] env[68217]: DEBUG nova.virt.vmwareapi.vmops [req-c7eed1d7-d35a-4f1e-ac64-510590e123a3 req-3e4d0fb9-f57c-44c3-af97-4c7f61ed89d0 service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Reconfigured VM to detach interface {{(pid=68217) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1064.450707] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 6.008s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.450933] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1064.451290] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1064.451354] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1064.453195] env[68217]: INFO nova.compute.manager [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Terminating instance [ 1064.589882] env[68217]: DEBUG nova.compute.manager [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1064.792986] env[68217]: DEBUG nova.network.neutron [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1064.950162] env[68217]: DEBUG nova.scheduler.client.report [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1064.956275] env[68217]: DEBUG nova.compute.manager [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1064.956477] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1064.957351] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd3258c-06ae-4ff8-8c13-227056ae03b7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.968779] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1064.971504] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ddf7e09e-4da9-420f-bc9d-8b045c64d7d4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.978940] env[68217]: DEBUG oslo_vmware.api [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1064.978940] env[68217]: value = "task-2961902" [ 1064.978940] env[68217]: _type = "Task" [ 1064.978940] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.987728] env[68217]: DEBUG oslo_vmware.api [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961902, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.030643] env[68217]: DEBUG nova.network.neutron [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Updating instance_info_cache with network_info: [{"id": "c0f23ace-2be2-4dca-b47a-a5b77ba68dd3", "address": "fa:16:3e:b3:5d:10", "network": {"id": "b560d1ba-8945-443a-9586-083067363663", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-25035929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0522eaa6ebc48a28651f6b3bf1434f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0f23ace-2b", "ovs_interfaceid": "c0f23ace-2be2-4dca-b47a-a5b77ba68dd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1065.488505] env[68217]: DEBUG oslo_vmware.api [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961902, 'name': PowerOffVM_Task, 'duration_secs': 0.177636} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1065.488853] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1065.490148] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1065.490148] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f48a783-cd27-4dbf-a3b3-285debc34da0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.536775] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Releasing lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1065.537337] env[68217]: DEBUG nova.compute.manager [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Instance network_info: |[{"id": "c0f23ace-2be2-4dca-b47a-a5b77ba68dd3", "address": "fa:16:3e:b3:5d:10", "network": {"id": "b560d1ba-8945-443a-9586-083067363663", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-25035929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0522eaa6ebc48a28651f6b3bf1434f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0f23ace-2b", "ovs_interfaceid": "c0f23ace-2be2-4dca-b47a-a5b77ba68dd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1065.538173] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:5d:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'c0f23ace-2be2-4dca-b47a-a5b77ba68dd3', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1065.546226] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1065.546445] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1065.546668] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9cf359c1-a8b2-48b1-bfb9-2731181c15f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.564346] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1065.564545] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1065.564718] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Deleting the datastore file [datastore1] 35c4ab95-fc14-4bd4-a2a5-64f15f070b88 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1065.565340] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-26b870f2-0069-4c93-8af2-b4343505d8f5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.569926] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1065.569926] env[68217]: value = "task-2961904" [ 1065.569926] env[68217]: _type = "Task" [ 1065.569926] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.573714] env[68217]: DEBUG oslo_vmware.api [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1065.573714] env[68217]: value = "task-2961905" [ 1065.573714] env[68217]: _type = "Task" [ 1065.573714] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1065.579820] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961904, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.584769] env[68217]: DEBUG oslo_vmware.api [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961905, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1065.599296] env[68217]: DEBUG nova.compute.manager [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1065.644217] env[68217]: DEBUG nova.virt.hardware [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1065.644572] env[68217]: DEBUG nova.virt.hardware [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1065.644831] env[68217]: DEBUG nova.virt.hardware [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1065.645154] env[68217]: DEBUG nova.virt.hardware [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1065.645337] env[68217]: DEBUG nova.virt.hardware [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1065.645603] env[68217]: DEBUG nova.virt.hardware [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1065.645942] env[68217]: DEBUG nova.virt.hardware [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d 
tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1065.646226] env[68217]: DEBUG nova.virt.hardware [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1065.646499] env[68217]: DEBUG nova.virt.hardware [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1065.647012] env[68217]: DEBUG nova.virt.hardware [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1065.647296] env[68217]: DEBUG nova.virt.hardware [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1065.648415] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232687a1-84a9-4494-a431-7f43b2229743 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.658170] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ebe5251-d399-4b88-9d31-79f4dabfece6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.706557] env[68217]: DEBUG nova.compute.manager [req-bf38cb58-84d3-4cf1-b876-929d862f4136 req-921958a2-e3fc-4c40-849f-0fc008f3e326 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Received event network-changed-c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1065.706807] env[68217]: DEBUG nova.compute.manager [req-bf38cb58-84d3-4cf1-b876-929d862f4136 req-921958a2-e3fc-4c40-849f-0fc008f3e326 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Refreshing instance network info cache due to event network-changed-c0f23ace-2be2-4dca-b47a-a5b77ba68dd3. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1065.707096] env[68217]: DEBUG oslo_concurrency.lockutils [req-bf38cb58-84d3-4cf1-b876-929d862f4136 req-921958a2-e3fc-4c40-849f-0fc008f3e326 service nova] Acquiring lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1065.707289] env[68217]: DEBUG oslo_concurrency.lockutils [req-bf38cb58-84d3-4cf1-b876-929d862f4136 req-921958a2-e3fc-4c40-849f-0fc008f3e326 service nova] Acquired lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1065.707503] env[68217]: DEBUG nova.network.neutron [req-bf38cb58-84d3-4cf1-b876-929d862f4136 req-921958a2-e3fc-4c40-849f-0fc008f3e326 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Refreshing network info cache for port c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1065.881986] env[68217]: DEBUG nova.network.neutron [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Successfully updated port: 1916eda4-ae94-4d60-800d-2f94dc473c1e {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1065.960392] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.382s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.960617] env[68217]: DEBUG nova.compute.manager [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Resized/migrated instance is powered off. Setting vm_state to 'stopped'. 
{{(pid=68217) _confirm_resize /opt/stack/nova/nova/compute/manager.py:5376}} [ 1065.963643] env[68217]: DEBUG oslo_concurrency.lockutils [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.857s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.963838] env[68217]: DEBUG oslo_concurrency.lockutils [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1065.965899] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.217s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1065.967372] env[68217]: INFO nova.compute.claims [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1065.989917] env[68217]: INFO nova.scheduler.client.report [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Deleted allocations for instance ffff4cf4-f663-4965-84d1-8351bfde1252 [ 1066.087396] env[68217]: DEBUG oslo_vmware.api [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961905, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.17011} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.092386] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1066.092731] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1066.093058] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1066.093376] env[68217]: INFO nova.compute.manager [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1066.093714] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1066.093969] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961904, 'name': CreateVM_Task, 'duration_secs': 0.29947} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.094281] env[68217]: DEBUG nova.compute.manager [-] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1066.094438] env[68217]: DEBUG nova.network.neutron [-] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1066.096781] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1066.098557] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.098716] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.099082] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1066.099675] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf4f0d40-f537-4182-a18d-cdf792cdc090 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.104324] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1066.104324] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5207e99b-a2aa-43c4-e3ee-d0ddc0bae905" [ 1066.104324] env[68217]: _type = "Task" [ 1066.104324] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.112582] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5207e99b-a2aa-43c4-e3ee-d0ddc0bae905, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.384830] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "refresh_cache-e3dfe047-7cdc-4a1d-8af3-6437b5555ac4" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.384924] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "refresh_cache-e3dfe047-7cdc-4a1d-8af3-6437b5555ac4" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.385101] env[68217]: DEBUG nova.network.neutron [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1066.503231] env[68217]: DEBUG oslo_concurrency.lockutils [None req-68c7893f-6fe4-4987-a429-dee31ccf808d tempest-DeleteServersTestJSON-2028014361 tempest-DeleteServersTestJSON-2028014361-project-member] Lock "ffff4cf4-f663-4965-84d1-8351bfde1252" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.043s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1066.542799] env[68217]: INFO nova.scheduler.client.report [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Deleted allocation for migration fd057df1-4b70-4d8f-8028-ac2b12d1f0b0 [ 1066.582249] env[68217]: DEBUG nova.network.neutron [req-bf38cb58-84d3-4cf1-b876-929d862f4136 req-921958a2-e3fc-4c40-849f-0fc008f3e326 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Updated VIF entry in instance network info cache for port c0f23ace-2be2-4dca-b47a-a5b77ba68dd3. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1066.582249] env[68217]: DEBUG nova.network.neutron [req-bf38cb58-84d3-4cf1-b876-929d862f4136 req-921958a2-e3fc-4c40-849f-0fc008f3e326 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Updating instance_info_cache with network_info: [{"id": "c0f23ace-2be2-4dca-b47a-a5b77ba68dd3", "address": "fa:16:3e:b3:5d:10", "network": {"id": "b560d1ba-8945-443a-9586-083067363663", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-25035929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0522eaa6ebc48a28651f6b3bf1434f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0f23ace-2b", "ovs_interfaceid": "c0f23ace-2be2-4dca-b47a-a5b77ba68dd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1066.615846] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5207e99b-a2aa-43c4-e3ee-d0ddc0bae905, 'name': SearchDatastore_Task, 'duration_secs': 0.009442} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.616211] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1066.616455] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1066.616762] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1066.616952] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1066.617199] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1066.617486] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c955dff5-7436-4c91-8924-1f45f7377ee3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.626458] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1066.626629] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1066.627408] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-67f92ed1-2608-43bb-b809-e6751ca4bea8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.632555] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1066.632555] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a8c1f4-35f7-5166-6508-7be2d2319a8b" [ 1066.632555] env[68217]: _type = "Task" [ 1066.632555] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.641088] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a8c1f4-35f7-5166-6508-7be2d2319a8b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.917921] env[68217]: DEBUG nova.network.neutron [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1067.049309] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f332b55b-78ea-429f-b4f8-1c9d7038adb4 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "04149a5c-d1b5-4d71-a1ca-44696506a40d" "released" by "nova.compute.manager.ComputeManager.confirm_resize.<locals>.do_confirm_resize" :: held 7.605s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.084846] env[68217]: DEBUG oslo_concurrency.lockutils [req-bf38cb58-84d3-4cf1-b876-929d862f4136 req-921958a2-e3fc-4c40-849f-0fc008f3e326 service nova] Releasing lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1067.088058] env[68217]: DEBUG nova.network.neutron [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Updating instance_info_cache with network_info: [{"id": "1916eda4-ae94-4d60-800d-2f94dc473c1e", "address": "fa:16:3e:c7:61:51", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, 
"nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1916eda4-ae", "ovs_interfaceid": "1916eda4-ae94-4d60-800d-2f94dc473c1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.145493] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a8c1f4-35f7-5166-6508-7be2d2319a8b, 'name': SearchDatastore_Task, 'duration_secs': 0.008273} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.146306] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a6b0f22-896e-40a1-8be4-7491fed896a3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.155549] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1067.155549] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52453fd7-6fa3-193a-74dd-5b1c102e5a62" [ 1067.155549] env[68217]: _type = "Task" [ 1067.155549] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.162183] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52453fd7-6fa3-193a-74dd-5b1c102e5a62, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.184704] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3c34f73-e2e1-4e58-a1f9-609ad9403e4f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.191764] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8ff2e63-e079-48ed-b1b6-fdab9c640ec2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.222696] env[68217]: DEBUG nova.network.neutron [-] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.223833] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f52661-9473-4357-abc3-0bb767e004b0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.232212] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8925ce9-381f-4ffe-9860-8100318a9f18 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.250489] env[68217]: DEBUG nova.compute.provider_tree [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1067.590222] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "refresh_cache-e3dfe047-7cdc-4a1d-8af3-6437b5555ac4" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1067.590561] env[68217]: DEBUG nova.compute.manager [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Instance network_info: |[{"id": "1916eda4-ae94-4d60-800d-2f94dc473c1e", "address": "fa:16:3e:c7:61:51", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1916eda4-ae", "ovs_interfaceid": "1916eda4-ae94-4d60-800d-2f94dc473c1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1067.590985] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c7:61:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3b7bf7d4-8e0c-4cee-84ba-244e73ef6379', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1916eda4-ae94-4d60-800d-2f94dc473c1e', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1067.598437] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1067.598643] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1067.598861] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-61befdbd-e506-4e02-ab09-06faea1f3af5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.618489] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1067.618489] env[68217]: value = "task-2961907" [ 1067.618489] env[68217]: _type = "Task" [ 1067.618489] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.630130] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961907, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.663508] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52453fd7-6fa3-193a-74dd-5b1c102e5a62, 'name': SearchDatastore_Task, 'duration_secs': 0.010205} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1067.663757] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1067.664045] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 815d1801-fa07-4466-850d-b1a36d630d46/815d1801-fa07-4466-850d-b1a36d630d46.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1067.664297] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4cf36954-2bb2-4412-a246-48b2b14a160a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.669514] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1067.669514] env[68217]: value = "task-2961908" [ 1067.669514] env[68217]: _type = "Task" [ 1067.669514] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1067.677065] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961908, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1067.727936] env[68217]: INFO nova.compute.manager [-] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Took 1.63 seconds to deallocate network for instance. 
[ 1067.736145] env[68217]: DEBUG nova.compute.manager [req-f604b684-637f-445c-9d6d-1fc4d433c330 req-25ad2328-572f-4b64-bbc6-5b9f9ac794aa service nova] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Received event network-vif-plugged-1916eda4-ae94-4d60-800d-2f94dc473c1e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1067.736357] env[68217]: DEBUG oslo_concurrency.lockutils [req-f604b684-637f-445c-9d6d-1fc4d433c330 req-25ad2328-572f-4b64-bbc6-5b9f9ac794aa service nova] Acquiring lock "e3dfe047-7cdc-4a1d-8af3-6437b5555ac4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1067.736561] env[68217]: DEBUG oslo_concurrency.lockutils [req-f604b684-637f-445c-9d6d-1fc4d433c330 req-25ad2328-572f-4b64-bbc6-5b9f9ac794aa service nova] Lock "e3dfe047-7cdc-4a1d-8af3-6437b5555ac4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1067.736725] env[68217]: DEBUG oslo_concurrency.lockutils [req-f604b684-637f-445c-9d6d-1fc4d433c330 req-25ad2328-572f-4b64-bbc6-5b9f9ac794aa service nova] Lock "e3dfe047-7cdc-4a1d-8af3-6437b5555ac4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1067.736887] env[68217]: DEBUG nova.compute.manager [req-f604b684-637f-445c-9d6d-1fc4d433c330 req-25ad2328-572f-4b64-bbc6-5b9f9ac794aa service nova] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] No waiting events found dispatching network-vif-plugged-1916eda4-ae94-4d60-800d-2f94dc473c1e {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1067.737086] env[68217]: WARNING nova.compute.manager [req-f604b684-637f-445c-9d6d-1fc4d433c330 req-25ad2328-572f-4b64-bbc6-5b9f9ac794aa service nova] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Received unexpected event network-vif-plugged-1916eda4-ae94-4d60-800d-2f94dc473c1e for instance with vm_state building and task_state spawning. [ 1067.737262] env[68217]: DEBUG nova.compute.manager [req-f604b684-637f-445c-9d6d-1fc4d433c330 req-25ad2328-572f-4b64-bbc6-5b9f9ac794aa service nova] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Received event network-changed-1916eda4-ae94-4d60-800d-2f94dc473c1e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1067.737417] env[68217]: DEBUG nova.compute.manager [req-f604b684-637f-445c-9d6d-1fc4d433c330 req-25ad2328-572f-4b64-bbc6-5b9f9ac794aa service nova] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Refreshing instance network info cache due to event network-changed-1916eda4-ae94-4d60-800d-2f94dc473c1e. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1067.737597] env[68217]: DEBUG oslo_concurrency.lockutils [req-f604b684-637f-445c-9d6d-1fc4d433c330 req-25ad2328-572f-4b64-bbc6-5b9f9ac794aa service nova] Acquiring lock "refresh_cache-e3dfe047-7cdc-4a1d-8af3-6437b5555ac4" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1067.737732] env[68217]: DEBUG oslo_concurrency.lockutils [req-f604b684-637f-445c-9d6d-1fc4d433c330 req-25ad2328-572f-4b64-bbc6-5b9f9ac794aa service nova] Acquired lock "refresh_cache-e3dfe047-7cdc-4a1d-8af3-6437b5555ac4" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1067.737921] env[68217]: DEBUG nova.network.neutron [req-f604b684-637f-445c-9d6d-1fc4d433c330 req-25ad2328-572f-4b64-bbc6-5b9f9ac794aa service nova] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Refreshing network info cache for port 1916eda4-ae94-4d60-800d-2f94dc473c1e {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1067.753863] env[68217]: DEBUG nova.scheduler.client.report [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1067.806679] env[68217]: DEBUG nova.objects.instance [None req-415d3ef8-b1d7-4a97-a8eb-5751c5beeb1f tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lazy-loading 'flavor' on Instance uuid 04149a5c-d1b5-4d71-a1ca-44696506a40d {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.129234] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961907, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.179380] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961908, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497793} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.179653] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 815d1801-fa07-4466-850d-b1a36d630d46/815d1801-fa07-4466-850d-b1a36d630d46.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1068.179869] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1068.180142] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d386f29e-5b5e-486a-b7b5-4bba21d635ef {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.187311] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1068.187311] env[68217]: value = "task-2961909" [ 1068.187311] env[68217]: _type = "Task" [ 1068.187311] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.195119] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961909, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.235608] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1068.259237] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.293s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1068.259776] env[68217]: DEBUG nova.compute.manager [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1068.262959] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.027s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1068.263249] env[68217]: DEBUG nova.objects.instance [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lazy-loading 'resources' on Instance uuid 35c4ab95-fc14-4bd4-a2a5-64f15f070b88 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.311167] env[68217]: DEBUG oslo_concurrency.lockutils [None req-415d3ef8-b1d7-4a97-a8eb-5751c5beeb1f tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.311364] env[68217]: DEBUG oslo_concurrency.lockutils [None req-415d3ef8-b1d7-4a97-a8eb-5751c5beeb1f tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.311519] env[68217]: DEBUG nova.network.neutron [None req-415d3ef8-b1d7-4a97-a8eb-5751c5beeb1f tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1068.311692] env[68217]: DEBUG nova.objects.instance [None req-415d3ef8-b1d7-4a97-a8eb-5751c5beeb1f tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lazy-loading 'info_cache' on Instance uuid 04149a5c-d1b5-4d71-a1ca-44696506a40d {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.538803] env[68217]: DEBUG nova.network.neutron [req-f604b684-637f-445c-9d6d-1fc4d433c330 req-25ad2328-572f-4b64-bbc6-5b9f9ac794aa service nova] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Updated VIF entry in instance network info cache for port 1916eda4-ae94-4d60-800d-2f94dc473c1e. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1068.539176] env[68217]: DEBUG nova.network.neutron [req-f604b684-637f-445c-9d6d-1fc4d433c330 req-25ad2328-572f-4b64-bbc6-5b9f9ac794aa service nova] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Updating instance_info_cache with network_info: [{"id": "1916eda4-ae94-4d60-800d-2f94dc473c1e", "address": "fa:16:3e:c7:61:51", "network": {"id": "dfac782a-c697-45d9-b09f-08c6289589ca", "bridge": "br-int", "label": "tempest-ServersTestJSON-314506067-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "0d6233e9874c41329f81c990f8bc72b1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3b7bf7d4-8e0c-4cee-84ba-244e73ef6379", "external-id": "nsx-vlan-transportzone-423", "segmentation_id": 423, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1916eda4-ae", "ovs_interfaceid": "1916eda4-ae94-4d60-800d-2f94dc473c1e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1068.628536] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961907, 'name': CreateVM_Task, 'duration_secs': 0.526666} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.628865] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1068.629419] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1068.629539] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1068.629814] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1068.630066] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-da429f96-1b2a-4ba9-bc23-1064ac97e97e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.635400] 
env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1068.635400] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a7ede1-8f6f-41ac-c94f-7acbcecab783" [ 1068.635400] env[68217]: _type = "Task" [ 1068.635400] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.642737] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a7ede1-8f6f-41ac-c94f-7acbcecab783, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.696303] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961909, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060899} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1068.696544] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1068.697319] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4662ef85-b4f4-4940-83fd-923d34ac7a40 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.718295] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 815d1801-fa07-4466-850d-b1a36d630d46/815d1801-fa07-4466-850d-b1a36d630d46.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1068.718527] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9710fd85-45c6-48de-b81e-5a1265117162 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.737307] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1068.737307] env[68217]: value = "task-2961910" [ 1068.737307] env[68217]: _type = "Task" [ 1068.737307] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1068.744270] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961910, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1068.764738] env[68217]: DEBUG nova.compute.utils [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1068.766170] env[68217]: DEBUG nova.compute.manager [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1068.766390] env[68217]: DEBUG nova.network.neutron [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1068.806105] env[68217]: DEBUG nova.policy [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b76681b9ef1446dda7a508c8ade75e69', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '904fd1b1eb9d4ab8bd1ea9967249bc29', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1068.815129] env[68217]: DEBUG nova.objects.base [None req-415d3ef8-b1d7-4a97-a8eb-5751c5beeb1f tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Object Instance<04149a5c-d1b5-4d71-a1ca-44696506a40d> lazy-loaded attributes: flavor,info_cache {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1068.960030] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e613eb-ee03-4653-be3f-d9f1c28a560f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.967914] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd584cfb-65e2-4197-adbe-e05f3d730a3f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1068.999887] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71b99b2e-d08a-4a1d-8002-d02f7df112eb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.007080] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318f9bbb-5022-4bee-94ff-9e34fbde27b4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.020194] env[68217]: DEBUG nova.compute.provider_tree [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Inventory has not 
changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1069.043757] env[68217]: DEBUG oslo_concurrency.lockutils [req-f604b684-637f-445c-9d6d-1fc4d433c330 req-25ad2328-572f-4b64-bbc6-5b9f9ac794aa service nova] Releasing lock "refresh_cache-e3dfe047-7cdc-4a1d-8af3-6437b5555ac4" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.043757] env[68217]: DEBUG nova.compute.manager [req-f604b684-637f-445c-9d6d-1fc4d433c330 req-25ad2328-572f-4b64-bbc6-5b9f9ac794aa service nova] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Received event network-vif-deleted-05c67562-5b0b-421a-a707-1d10d90f4a71 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1069.080926] env[68217]: DEBUG nova.network.neutron [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Successfully created port: 9414aa68-f0b4-452d-a959-86e8f0acf53e {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1069.145155] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a7ede1-8f6f-41ac-c94f-7acbcecab783, 'name': SearchDatastore_Task, 'duration_secs': 0.008912} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.145461] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1069.145697] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1069.145931] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1069.146090] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1069.146271] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Creating directory with path 
[datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1069.146571] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6ad63985-0bca-4bb2-b5f8-3044ece1ea01 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.155045] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1069.155234] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1069.155954] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-41a2a429-9c44-4a63-af8e-cd7101677597 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.161208] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1069.161208] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5235fca0-cf80-b23e-9bd0-30c2ed6fc3fc" [ 1069.161208] env[68217]: _type = "Task" [ 1069.161208] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.168700] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5235fca0-cf80-b23e-9bd0-30c2ed6fc3fc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.247286] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961910, 'name': ReconfigVM_Task, 'duration_secs': 0.309359} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.247908] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 815d1801-fa07-4466-850d-b1a36d630d46/815d1801-fa07-4466-850d-b1a36d630d46.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1069.248193] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eea0ce07-2a06-402b-80b0-3f8cda9c388b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.254759] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1069.254759] env[68217]: value = "task-2961911" [ 1069.254759] env[68217]: _type = "Task" [ 1069.254759] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.263105] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961911, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.268766] env[68217]: DEBUG nova.compute.manager [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1069.522862] env[68217]: DEBUG nova.scheduler.client.report [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1069.563730] env[68217]: DEBUG nova.network.neutron [None req-415d3ef8-b1d7-4a97-a8eb-5751c5beeb1f tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updating instance_info_cache with network_info: [{"id": "772726db-4382-4051-9a7d-abfc670d5c9b", "address": "fa:16:3e:49:ce:f8", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap772726db-43", "ovs_interfaceid": "772726db-4382-4051-9a7d-abfc670d5c9b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1069.671511] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5235fca0-cf80-b23e-9bd0-30c2ed6fc3fc, 'name': SearchDatastore_Task, 'duration_secs': 0.007907} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.672338] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8412fac-0914-4de9-845d-a55612dbf884 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.678063] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1069.678063] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d80013-097f-c0c0-c573-a6eaf2137423" [ 1069.678063] env[68217]: _type = "Task" [ 1069.678063] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.684604] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d80013-097f-c0c0-c573-a6eaf2137423, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1069.764289] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961911, 'name': Rename_Task, 'duration_secs': 0.144983} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1069.764565] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1069.764818] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-62f0f77d-f968-44d5-93cd-e84a82e67e32 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.771060] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1069.771060] env[68217]: value = "task-2961912" [ 1069.771060] env[68217]: _type = "Task" [ 1069.771060] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1069.787028] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961912, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.028033] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.765s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.050234] env[68217]: INFO nova.scheduler.client.report [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Deleted allocations for instance 35c4ab95-fc14-4bd4-a2a5-64f15f070b88 [ 1070.066594] env[68217]: DEBUG oslo_concurrency.lockutils [None req-415d3ef8-b1d7-4a97-a8eb-5751c5beeb1f tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "refresh_cache-04149a5c-d1b5-4d71-a1ca-44696506a40d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.188205] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d80013-097f-c0c0-c573-a6eaf2137423, 'name': SearchDatastore_Task, 'duration_secs': 0.008724} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.188490] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1070.188730] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] e3dfe047-7cdc-4a1d-8af3-6437b5555ac4/e3dfe047-7cdc-4a1d-8af3-6437b5555ac4.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1070.188983] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f26526ed-e1cb-49ec-89b3-9609bc409f2c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.195184] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1070.195184] env[68217]: value = "task-2961913" [ 1070.195184] env[68217]: _type = "Task" [ 1070.195184] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.202981] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961913, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.283326] env[68217]: DEBUG nova.compute.manager [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1070.285533] env[68217]: DEBUG oslo_vmware.api [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2961912, 'name': PowerOnVM_Task, 'duration_secs': 0.411379} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.286153] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1070.286405] env[68217]: INFO nova.compute.manager [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Took 6.99 seconds to spawn the instance on the hypervisor. 
[ 1070.286638] env[68217]: DEBUG nova.compute.manager [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1070.287550] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2245d582-db6e-4721-8732-59bc53b6fd13 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.311533] env[68217]: DEBUG nova.virt.hardware [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1070.311688] env[68217]: DEBUG nova.virt.hardware [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1070.312272] env[68217]: DEBUG nova.virt.hardware [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1070.312272] env[68217]: DEBUG nova.virt.hardware [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1070.312272] env[68217]: DEBUG nova.virt.hardware [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1070.312430] env[68217]: DEBUG nova.virt.hardware [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1070.312527] env[68217]: DEBUG nova.virt.hardware [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1070.312705] env[68217]: DEBUG nova.virt.hardware [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1070.312889] env[68217]: DEBUG nova.virt.hardware [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1070.313064] env[68217]: DEBUG nova.virt.hardware [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1070.313308] env[68217]: DEBUG nova.virt.hardware [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1070.314166] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab81101b-5d09-4a2e-bf8e-86d7066a9e16 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.322190] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baecb96a-c6cf-41eb-acfb-983c0d65f0e3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.558840] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cbae3a21-9578-44fd-a273-f5433188f1cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "35c4ab95-fc14-4bd4-a2a5-64f15f070b88" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.108s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.708229] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961913, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471069} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1070.708658] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] e3dfe047-7cdc-4a1d-8af3-6437b5555ac4/e3dfe047-7cdc-4a1d-8af3-6437b5555ac4.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1070.709025] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1070.709500] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-7396687b-a4b1-4239-a1a5-df2c52fdd373 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1070.721022] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1070.721022] env[68217]: value = "task-2961914" [ 1070.721022] env[68217]: _type = "Task" [ 1070.721022] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1070.731469] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961914, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1070.814051] env[68217]: INFO nova.compute.manager [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Took 14.35 seconds to build instance. 
[ 1070.978764] env[68217]: DEBUG nova.compute.manager [req-b8e32656-a8e6-44dd-8329-81a28a5ae829 req-62e2dd32-f2ea-4700-8e66-755ec57e4335 service nova] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Received event network-vif-plugged-9414aa68-f0b4-452d-a959-86e8f0acf53e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1070.979047] env[68217]: DEBUG oslo_concurrency.lockutils [req-b8e32656-a8e6-44dd-8329-81a28a5ae829 req-62e2dd32-f2ea-4700-8e66-755ec57e4335 service nova] Acquiring lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1070.979299] env[68217]: DEBUG oslo_concurrency.lockutils [req-b8e32656-a8e6-44dd-8329-81a28a5ae829 req-62e2dd32-f2ea-4700-8e66-755ec57e4335 service nova] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1070.979475] env[68217]: DEBUG oslo_concurrency.lockutils [req-b8e32656-a8e6-44dd-8329-81a28a5ae829 req-62e2dd32-f2ea-4700-8e66-755ec57e4335 service nova] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1070.979644] env[68217]: DEBUG nova.compute.manager [req-b8e32656-a8e6-44dd-8329-81a28a5ae829 req-62e2dd32-f2ea-4700-8e66-755ec57e4335 service nova] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] No waiting events found dispatching network-vif-plugged-9414aa68-f0b4-452d-a959-86e8f0acf53e {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1070.979811] env[68217]: WARNING nova.compute.manager [req-b8e32656-a8e6-44dd-8329-81a28a5ae829 req-62e2dd32-f2ea-4700-8e66-755ec57e4335 service nova] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Received unexpected event network-vif-plugged-9414aa68-f0b4-452d-a959-86e8f0acf53e for instance with vm_state building and task_state spawning. 
[ 1071.060996] env[68217]: DEBUG nova.network.neutron [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Successfully updated port: 9414aa68-f0b4-452d-a959-86e8f0acf53e {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1071.073750] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-415d3ef8-b1d7-4a97-a8eb-5751c5beeb1f tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1071.073999] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5c1aed65-4a0c-4242-801d-05a73bcac5d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.084774] env[68217]: DEBUG oslo_vmware.api [None req-415d3ef8-b1d7-4a97-a8eb-5751c5beeb1f tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1071.084774] env[68217]: value = "task-2961915" [ 1071.084774] env[68217]: _type = "Task" [ 1071.084774] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.096380] env[68217]: DEBUG oslo_vmware.api [None req-415d3ef8-b1d7-4a97-a8eb-5751c5beeb1f tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961915, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.232462] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961914, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071283} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.233074] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1071.234650] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e1ba05-b569-404f-b4a9-cebc7a7720b2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.259042] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] e3dfe047-7cdc-4a1d-8af3-6437b5555ac4/e3dfe047-7cdc-4a1d-8af3-6437b5555ac4.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1071.259838] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c14b6c85-7072-4def-bb57-153bafd07cab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.282666] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1071.282666] env[68217]: value = "task-2961916" [ 1071.282666] env[68217]: _type = "Task" [ 1071.282666] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.292831] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961916, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1071.316556] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6f509643-bd8a-4472-8519-42ae5684f3a2 tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "815d1801-fa07-4466-850d-b1a36d630d46" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.862s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1071.564788] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "refresh_cache-2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.564788] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquired lock "refresh_cache-2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1071.564788] env[68217]: DEBUG nova.network.neutron [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1071.596713] env[68217]: DEBUG oslo_vmware.api [None req-415d3ef8-b1d7-4a97-a8eb-5751c5beeb1f tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961915, 'name': PowerOnVM_Task, 'duration_secs': 0.509952} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1071.598012] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-415d3ef8-b1d7-4a97-a8eb-5751c5beeb1f tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1071.598388] env[68217]: DEBUG nova.compute.manager [None req-415d3ef8-b1d7-4a97-a8eb-5751c5beeb1f tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1071.599507] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d992cf2b-23c2-4f0f-b901-4f11b734ae56 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.664266] env[68217]: DEBUG nova.compute.manager [req-4c6acb9f-bbe4-4271-8f90-335902e42d11 req-816e0497-3a3d-4a26-9989-e2a9edd2323d service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Received event network-changed-c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1071.664369] env[68217]: DEBUG nova.compute.manager [req-4c6acb9f-bbe4-4271-8f90-335902e42d11 req-816e0497-3a3d-4a26-9989-e2a9edd2323d service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Refreshing instance network info cache due to event network-changed-c0f23ace-2be2-4dca-b47a-a5b77ba68dd3. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1071.664586] env[68217]: DEBUG oslo_concurrency.lockutils [req-4c6acb9f-bbe4-4271-8f90-335902e42d11 req-816e0497-3a3d-4a26-9989-e2a9edd2323d service nova] Acquiring lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1071.664731] env[68217]: DEBUG oslo_concurrency.lockutils [req-4c6acb9f-bbe4-4271-8f90-335902e42d11 req-816e0497-3a3d-4a26-9989-e2a9edd2323d service nova] Acquired lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1071.664895] env[68217]: DEBUG nova.network.neutron [req-4c6acb9f-bbe4-4271-8f90-335902e42d11 req-816e0497-3a3d-4a26-9989-e2a9edd2323d service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Refreshing network info cache for port c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1071.798263] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961916, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.129468] env[68217]: DEBUG nova.network.neutron [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1072.295266] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961916, 'name': ReconfigVM_Task, 'duration_secs': 0.758367} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.299472] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Reconfigured VM instance instance-0000006a to attach disk [datastore2] e3dfe047-7cdc-4a1d-8af3-6437b5555ac4/e3dfe047-7cdc-4a1d-8af3-6437b5555ac4.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1072.300171] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-4ed3e07d-187d-404b-b0f2-cb19d92d93f0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.308493] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1072.308493] env[68217]: value = "task-2961917" [ 1072.308493] env[68217]: _type = "Task" [ 1072.308493] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.317669] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961917, 'name': Rename_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.484403] env[68217]: DEBUG nova.network.neutron [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Updating instance_info_cache with network_info: [{"id": "9414aa68-f0b4-452d-a959-86e8f0acf53e", "address": "fa:16:3e:12:ac:63", "network": {"id": "0e4ebc2e-6e2d-4414-a560-9db08d15dabf", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1709597117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "904fd1b1eb9d4ab8bd1ea9967249bc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9414aa68-f0", "ovs_interfaceid": "9414aa68-f0b4-452d-a959-86e8f0acf53e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.687214] env[68217]: DEBUG nova.network.neutron [req-4c6acb9f-bbe4-4271-8f90-335902e42d11 req-816e0497-3a3d-4a26-9989-e2a9edd2323d service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Updated VIF entry in instance network info cache for port c0f23ace-2be2-4dca-b47a-a5b77ba68dd3. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1072.687643] env[68217]: DEBUG nova.network.neutron [req-4c6acb9f-bbe4-4271-8f90-335902e42d11 req-816e0497-3a3d-4a26-9989-e2a9edd2323d service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Updating instance_info_cache with network_info: [{"id": "c0f23ace-2be2-4dca-b47a-a5b77ba68dd3", "address": "fa:16:3e:b3:5d:10", "network": {"id": "b560d1ba-8945-443a-9586-083067363663", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-25035929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0522eaa6ebc48a28651f6b3bf1434f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0f23ace-2b", "ovs_interfaceid": "c0f23ace-2be2-4dca-b47a-a5b77ba68dd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1072.820940] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961917, 'name': Rename_Task, 'duration_secs': 0.230906} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.821819] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1072.821819] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1bbab03c-d278-4f2b-9b6b-83709e4314eb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.830428] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1072.830428] env[68217]: value = "task-2961918" [ 1072.830428] env[68217]: _type = "Task" [ 1072.830428] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.840041] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961918, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.991423] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Releasing lock "refresh_cache-2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1072.991423] env[68217]: DEBUG nova.compute.manager [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Instance network_info: |[{"id": "9414aa68-f0b4-452d-a959-86e8f0acf53e", "address": "fa:16:3e:12:ac:63", "network": {"id": "0e4ebc2e-6e2d-4414-a560-9db08d15dabf", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1709597117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "904fd1b1eb9d4ab8bd1ea9967249bc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9414aa68-f0", "ovs_interfaceid": "9414aa68-f0b4-452d-a959-86e8f0acf53e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1072.992513] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "04149a5c-d1b5-4d71-a1ca-44696506a40d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.992513] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "04149a5c-d1b5-4d71-a1ca-44696506a40d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1072.992513] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "04149a5c-d1b5-4d71-a1ca-44696506a40d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1072.992513] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 
tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "04149a5c-d1b5-4d71-a1ca-44696506a40d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1072.992717] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "04149a5c-d1b5-4d71-a1ca-44696506a40d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1072.994832] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:12:ac:63', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9414aa68-f0b4-452d-a959-86e8f0acf53e', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1073.005026] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1073.005803] env[68217]: INFO nova.compute.manager [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Terminating instance [ 1073.007913] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1073.009328] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34a6f0d2-06a6-4baa-b40d-5759d528506d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.029763] env[68217]: DEBUG nova.compute.manager [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1073.029981] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1073.031256] env[68217]: DEBUG nova.compute.manager [req-c43c94a5-a366-43f6-9208-6095777bbcf5 req-df329b2f-3d39-46bd-8b5a-e301f4d81f1d service nova] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Received event network-changed-9414aa68-f0b4-452d-a959-86e8f0acf53e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1073.031426] env[68217]: DEBUG nova.compute.manager [req-c43c94a5-a366-43f6-9208-6095777bbcf5 req-df329b2f-3d39-46bd-8b5a-e301f4d81f1d service nova] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Refreshing instance network info cache due to event network-changed-9414aa68-f0b4-452d-a959-86e8f0acf53e. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1073.031649] env[68217]: DEBUG oslo_concurrency.lockutils [req-c43c94a5-a366-43f6-9208-6095777bbcf5 req-df329b2f-3d39-46bd-8b5a-e301f4d81f1d service nova] Acquiring lock "refresh_cache-2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1073.031782] env[68217]: DEBUG oslo_concurrency.lockutils [req-c43c94a5-a366-43f6-9208-6095777bbcf5 req-df329b2f-3d39-46bd-8b5a-e301f4d81f1d service nova] Acquired lock "refresh_cache-2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1073.033983] env[68217]: DEBUG nova.network.neutron [req-c43c94a5-a366-43f6-9208-6095777bbcf5 req-df329b2f-3d39-46bd-8b5a-e301f4d81f1d service nova] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Refreshing network info cache for port 9414aa68-f0b4-452d-a959-86e8f0acf53e {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1073.034315] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a869402-0785-4b41-b67a-f37dec829bbb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.044472] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1073.046308] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-dcb779af-c5ec-449b-b954-4f6151bc66c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.048415] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1073.048415] env[68217]: value = "task-2961919" [ 1073.048415] env[68217]: _type = "Task" [ 1073.048415] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.055465] env[68217]: DEBUG oslo_vmware.api [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1073.055465] env[68217]: value = "task-2961920" [ 1073.055465] env[68217]: _type = "Task" [ 1073.055465] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.063247] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961919, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.069286] env[68217]: DEBUG oslo_vmware.api [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961920, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.193427] env[68217]: DEBUG oslo_concurrency.lockutils [req-4c6acb9f-bbe4-4271-8f90-335902e42d11 req-816e0497-3a3d-4a26-9989-e2a9edd2323d service nova] Releasing lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1073.345582] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961918, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.410695] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "89136574-575c-47da-928c-bd7a5dbb3a98" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.410695] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "89136574-575c-47da-928c-bd7a5dbb3a98" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.563370] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961919, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.573173] env[68217]: DEBUG oslo_vmware.api [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961920, 'name': PowerOffVM_Task, 'duration_secs': 0.181476} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.573451] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1073.573625] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1073.573945] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ce4e0431-6f00-4c3d-96f1-b7e92db83e80 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.586231] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9ceb18f-8353-4270-99fc-0956f5ef5bb7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "d28bcf16-b081-4dc8-a975-2acaed222e15" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1073.586495] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9ceb18f-8353-4270-99fc-0956f5ef5bb7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "d28bcf16-b081-4dc8-a975-2acaed222e15" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1073.586675] env[68217]: DEBUG nova.compute.manager [None req-f9ceb18f-8353-4270-99fc-0956f5ef5bb7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1073.587654] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9904c085-8f1e-4814-b399-633185530606 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.596231] env[68217]: DEBUG nova.compute.manager [None req-f9ceb18f-8353-4270-99fc-0956f5ef5bb7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68217) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1073.596789] env[68217]: DEBUG nova.objects.instance [None req-f9ceb18f-8353-4270-99fc-0956f5ef5bb7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lazy-loading 'flavor' on Instance uuid d28bcf16-b081-4dc8-a975-2acaed222e15 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1073.665119] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 
tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1073.665374] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1073.665556] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Deleting the datastore file [datastore2] 04149a5c-d1b5-4d71-a1ca-44696506a40d {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1073.665828] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c6b2f7c-a92d-49a4-b15a-e0d915bafc67 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.672966] env[68217]: DEBUG oslo_vmware.api [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1073.672966] env[68217]: value = "task-2961922" [ 1073.672966] env[68217]: _type = "Task" [ 1073.672966] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1073.684124] env[68217]: DEBUG oslo_vmware.api [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961922, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.777922] env[68217]: DEBUG nova.network.neutron [req-c43c94a5-a366-43f6-9208-6095777bbcf5 req-df329b2f-3d39-46bd-8b5a-e301f4d81f1d service nova] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Updated VIF entry in instance network info cache for port 9414aa68-f0b4-452d-a959-86e8f0acf53e. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1073.778355] env[68217]: DEBUG nova.network.neutron [req-c43c94a5-a366-43f6-9208-6095777bbcf5 req-df329b2f-3d39-46bd-8b5a-e301f4d81f1d service nova] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Updating instance_info_cache with network_info: [{"id": "9414aa68-f0b4-452d-a959-86e8f0acf53e", "address": "fa:16:3e:12:ac:63", "network": {"id": "0e4ebc2e-6e2d-4414-a560-9db08d15dabf", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1709597117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "904fd1b1eb9d4ab8bd1ea9967249bc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9414aa68-f0", "ovs_interfaceid": "9414aa68-f0b4-452d-a959-86e8f0acf53e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.842512] env[68217]: DEBUG oslo_vmware.api [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961918, 'name': PowerOnVM_Task, 'duration_secs': 0.57982} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.843122] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1073.843122] env[68217]: INFO nova.compute.manager [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Took 8.24 seconds to spawn the instance on the hypervisor. [ 1073.843370] env[68217]: DEBUG nova.compute.manager [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1073.844041] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ae6191a-b9be-4465-8de1-065f0f5fe8bf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.911784] env[68217]: DEBUG nova.compute.manager [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1074.059820] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961919, 'name': CreateVM_Task, 'duration_secs': 0.582399} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.059998] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1074.060741] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.060905] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.061391] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1074.061484] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-df47fe0b-3f5c-459a-b29a-caa5341c1742 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.067102] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1074.067102] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]520ace17-b2ea-6850-fb7c-4d1dd19d86c9" [ 1074.067102] env[68217]: _type = "Task" [ 1074.067102] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.075129] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]520ace17-b2ea-6850-fb7c-4d1dd19d86c9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.186980] env[68217]: DEBUG oslo_vmware.api [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961922, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.439759} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.187318] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1074.187502] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1074.187674] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1074.187840] env[68217]: INFO nova.compute.manager [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Took 1.16 seconds to destroy the instance on the hypervisor. [ 1074.188106] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1074.188304] env[68217]: DEBUG nova.compute.manager [-] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1074.188400] env[68217]: DEBUG nova.network.neutron [-] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1074.280698] env[68217]: DEBUG oslo_concurrency.lockutils [req-c43c94a5-a366-43f6-9208-6095777bbcf5 req-df329b2f-3d39-46bd-8b5a-e301f4d81f1d service nova] Releasing lock "refresh_cache-2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1074.370659] env[68217]: INFO nova.compute.manager [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Took 17.45 seconds to build instance. 
[ 1074.447242] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1074.447521] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1074.449140] env[68217]: INFO nova.compute.claims [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1074.581340] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]520ace17-b2ea-6850-fb7c-4d1dd19d86c9, 'name': SearchDatastore_Task, 'duration_secs': 0.035343} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1074.581794] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1074.583029] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1074.583029] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1074.583029] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1074.583029] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Creating directory with 
path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1074.583732] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aeab2b8d-08f2-4a0b-889f-da4e636f6668 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.598075] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1074.598075] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1074.598075] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-33136d9b-dd42-4dc2-8689-7142e5cae677 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.606190] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1074.606190] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52333125-b03d-42dc-418a-dd23867e7911" [ 1074.606190] env[68217]: _type = "Task" [ 1074.606190] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.607965] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9ceb18f-8353-4270-99fc-0956f5ef5bb7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1074.608380] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-780a3c97-e9b2-4cfb-a1e3-5e6a493d2621 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1074.615049] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52333125-b03d-42dc-418a-dd23867e7911, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.617331] env[68217]: DEBUG oslo_vmware.api [None req-f9ceb18f-8353-4270-99fc-0956f5ef5bb7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1074.617331] env[68217]: value = "task-2961923" [ 1074.617331] env[68217]: _type = "Task" [ 1074.617331] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1074.626902] env[68217]: DEBUG oslo_vmware.api [None req-f9ceb18f-8353-4270-99fc-0956f5ef5bb7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2961923, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1074.877030] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e1c07342-ab33-42ea-95d5-dbd39c093f0d tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "e3dfe047-7cdc-4a1d-8af3-6437b5555ac4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.967s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.114411] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52333125-b03d-42dc-418a-dd23867e7911, 'name': SearchDatastore_Task, 'duration_secs': 0.017984} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.117117] env[68217]: DEBUG nova.compute.manager [req-8fa61cc1-89ac-4d2f-a2d8-145857638c81 req-acac1c22-904e-4d56-95e8-8f947fb1561e service nova] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Received event network-vif-deleted-772726db-4382-4051-9a7d-abfc670d5c9b {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1075.117315] env[68217]: INFO nova.compute.manager [req-8fa61cc1-89ac-4d2f-a2d8-145857638c81 req-acac1c22-904e-4d56-95e8-8f947fb1561e service nova] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Neutron deleted interface 772726db-4382-4051-9a7d-abfc670d5c9b; detaching it from the instance and deleting it from the info cache [ 1075.117490] env[68217]: DEBUG nova.network.neutron [req-8fa61cc1-89ac-4d2f-a2d8-145857638c81 req-acac1c22-904e-4d56-95e8-8f947fb1561e service nova] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.118469] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-592726c9-ae19-455a-ad7b-ce1b91c61e2d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.132181] env[68217]: DEBUG oslo_vmware.api [None req-f9ceb18f-8353-4270-99fc-0956f5ef5bb7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2961923, 'name': PowerOffVM_Task, 'duration_secs': 0.201671} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.133232] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f9ceb18f-8353-4270-99fc-0956f5ef5bb7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1075.133480] env[68217]: DEBUG nova.compute.manager [None req-f9ceb18f-8353-4270-99fc-0956f5ef5bb7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1075.133832] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1075.133832] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5216f206-acc0-e666-4ff3-cf3a748ad235" [ 1075.133832] env[68217]: _type = "Task" [ 1075.133832] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.134766] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf686072-1973-4d45-baf8-a1f6266c7cbe {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.148214] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5216f206-acc0-e666-4ff3-cf3a748ad235, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.211767] env[68217]: DEBUG nova.network.neutron [-] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1075.624582] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6c4ee2c5-87fb-4ba3-b830-00520b0ef47f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.636318] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fedd365-1c1d-4014-98f6-42e8ef8d5103 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.656581] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f9ceb18f-8353-4270-99fc-0956f5ef5bb7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "d28bcf16-b081-4dc8-a975-2acaed222e15" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.070s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1075.663998] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5216f206-acc0-e666-4ff3-cf3a748ad235, 'name': SearchDatastore_Task, 'duration_secs': 0.017153} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1075.664298] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1075.664558] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b/2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1075.664818] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-90b7fbec-eb0c-434a-a095-01d1d85f8436 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.685316] env[68217]: DEBUG nova.compute.manager [req-8fa61cc1-89ac-4d2f-a2d8-145857638c81 req-acac1c22-904e-4d56-95e8-8f947fb1561e service nova] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Detach interface failed, port_id=772726db-4382-4051-9a7d-abfc670d5c9b, reason: Instance 04149a5c-d1b5-4d71-a1ca-44696506a40d could not be found. 
{{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1075.685818] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1075.685818] env[68217]: value = "task-2961924" [ 1075.685818] env[68217]: _type = "Task" [ 1075.685818] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.697404] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961924, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.713355] env[68217]: INFO nova.compute.manager [-] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Took 1.52 seconds to deallocate network for instance. [ 1075.728629] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0279e65-6fe6-4459-abe7-64c4c69fa741 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.737032] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a498cb9-3409-43e2-81ef-4c8eca8675d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.770490] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67040593-28dc-44a4-93b4-959c2c4fe486 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.778934] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43d19a55-d0ac-497b-b0f8-40e9fda50217 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.795292] env[68217]: DEBUG nova.compute.provider_tree [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1075.888450] env[68217]: DEBUG oslo_concurrency.lockutils [None req-28fab1c2-1bb8-446e-969f-a7f7c0a55fe2 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "e3dfe047-7cdc-4a1d-8af3-6437b5555ac4" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1075.888784] env[68217]: DEBUG oslo_concurrency.lockutils [None req-28fab1c2-1bb8-446e-969f-a7f7c0a55fe2 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "e3dfe047-7cdc-4a1d-8af3-6437b5555ac4" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1075.888968] env[68217]: DEBUG nova.compute.manager [None req-28fab1c2-1bb8-446e-969f-a7f7c0a55fe2 
tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1075.889913] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41131cdd-810b-4278-a455-71ac0acc08b2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.898878] env[68217]: DEBUG nova.compute.manager [None req-28fab1c2-1bb8-446e-969f-a7f7c0a55fe2 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68217) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1075.899608] env[68217]: DEBUG nova.objects.instance [None req-28fab1c2-1bb8-446e-969f-a7f7c0a55fe2 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lazy-loading 'flavor' on Instance uuid e3dfe047-7cdc-4a1d-8af3-6437b5555ac4 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1076.052667] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "8f0d5766-005e-459d-b9f4-e46b435e43b8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.052976] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "8f0d5766-005e-459d-b9f4-e46b435e43b8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.092091] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.092331] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.117484] env[68217]: DEBUG nova.objects.instance [None req-4154bf2d-8f20-4cbb-85b9-c12fa089030a tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lazy-loading 'flavor' on Instance uuid d28bcf16-b081-4dc8-a975-2acaed222e15 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1076.198641] env[68217]: 
DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961924, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.472349} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.198814] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b/2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1076.199049] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1076.199316] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0ff67b0-0963-48f3-b7d6-c49b929d0c44 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.206721] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1076.206721] env[68217]: value = "task-2961925" [ 1076.206721] env[68217]: _type = "Task" [ 1076.206721] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.215371] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961925, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.220493] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.299072] env[68217]: DEBUG nova.scheduler.client.report [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1076.307140] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "1faf45fb-a3b0-4647-b63d-3f51695b6171" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.307408] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "1faf45fb-a3b0-4647-b63d-3f51695b6171" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.307609] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "1faf45fb-a3b0-4647-b63d-3f51695b6171-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1076.308379] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "1faf45fb-a3b0-4647-b63d-3f51695b6171-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.308379] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "1faf45fb-a3b0-4647-b63d-3f51695b6171-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.310387] env[68217]: INFO nova.compute.manager [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Terminating instance [ 1076.557225] env[68217]: DEBUG nova.compute.manager [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1076.597934] env[68217]: DEBUG nova.compute.manager [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1076.623168] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4154bf2d-8f20-4cbb-85b9-c12fa089030a tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1076.623372] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4154bf2d-8f20-4cbb-85b9-c12fa089030a tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1076.623548] env[68217]: DEBUG nova.network.neutron [None req-4154bf2d-8f20-4cbb-85b9-c12fa089030a tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1076.623734] env[68217]: DEBUG nova.objects.instance [None req-4154bf2d-8f20-4cbb-85b9-c12fa089030a tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lazy-loading 'info_cache' on Instance uuid d28bcf16-b081-4dc8-a975-2acaed222e15 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1076.716864] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961925, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070814} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.717239] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1076.718064] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ddebb85-c8fa-417f-9eee-a90b5f36a27a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.741211] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b/2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1076.741552] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b2c29ff4-f59c-420e-9b66-aba351b8d89f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.762361] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1076.762361] env[68217]: value = "task-2961926" [ 1076.762361] env[68217]: _type = "Task" [ 1076.762361] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.770928] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961926, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.803978] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.356s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.804530] env[68217]: DEBUG nova.compute.manager [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1076.807816] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.587s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1076.808039] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1076.813651] env[68217]: DEBUG nova.compute.manager [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1076.813846] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1076.814688] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a60c1c2-97b0-4b22-bc9a-eb76f57883f8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.823697] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1076.823958] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc25ec9e-e738-4e42-a6d0-29d200f33ba9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.829833] env[68217]: DEBUG oslo_vmware.api [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1076.829833] env[68217]: value = "task-2961927" [ 1076.829833] env[68217]: _type = "Task" [ 1076.829833] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.834236] env[68217]: INFO nova.scheduler.client.report [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Deleted allocations for instance 04149a5c-d1b5-4d71-a1ca-44696506a40d [ 1076.843749] env[68217]: DEBUG oslo_vmware.api [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961927, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.907272] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-28fab1c2-1bb8-446e-969f-a7f7c0a55fe2 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1076.907659] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4cb68b13-11bb-4f83-a29a-7cc88131d0ca {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.915307] env[68217]: DEBUG oslo_vmware.api [None req-28fab1c2-1bb8-446e-969f-a7f7c0a55fe2 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1076.915307] env[68217]: value = "task-2961928" [ 1076.915307] env[68217]: _type = "Task" [ 1076.915307] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.923682] env[68217]: DEBUG oslo_vmware.api [None req-28fab1c2-1bb8-446e-969f-a7f7c0a55fe2 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961928, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.084527] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.084807] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1077.086417] env[68217]: INFO nova.compute.claims [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1077.119254] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1077.128566] env[68217]: DEBUG nova.objects.base [None req-4154bf2d-8f20-4cbb-85b9-c12fa089030a tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1077.275158] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961926, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.309767] env[68217]: DEBUG nova.compute.utils [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1077.311216] env[68217]: DEBUG nova.compute.manager [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1077.311388] env[68217]: DEBUG nova.network.neutron [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1077.339635] env[68217]: DEBUG oslo_vmware.api [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961927, 'name': PowerOffVM_Task, 'duration_secs': 0.342678} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.339890] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1077.340067] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1077.340500] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-3233df5c-8d5c-4ce1-b313-43c8a38964ba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.348702] env[68217]: DEBUG nova.policy [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9172578aec2742bb9aafc58752b926c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef7e30ed571740f3b3ea6b24fc9c6e20', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1077.350369] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1adf53af-2a61-4a4a-bce4-2693f2ead54a tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "04149a5c-d1b5-4d71-a1ca-44696506a40d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.358s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1077.398932] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1077.399176] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 
tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1077.399361] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Deleting the datastore file [datastore1] 1faf45fb-a3b0-4647-b63d-3f51695b6171 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1077.399631] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47cffa72-ccac-4b82-a798-b2319d5dbda2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.406224] env[68217]: DEBUG oslo_vmware.api [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1077.406224] env[68217]: value = "task-2961930" [ 1077.406224] env[68217]: _type = "Task" [ 1077.406224] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.414988] env[68217]: DEBUG oslo_vmware.api [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961930, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.424354] env[68217]: DEBUG oslo_vmware.api [None req-28fab1c2-1bb8-446e-969f-a7f7c0a55fe2 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961928, 'name': PowerOffVM_Task, 'duration_secs': 0.21631} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.424622] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-28fab1c2-1bb8-446e-969f-a7f7c0a55fe2 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1077.424838] env[68217]: DEBUG nova.compute.manager [None req-28fab1c2-1bb8-446e-969f-a7f7c0a55fe2 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1077.425994] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecfb904d-6178-4f83-915a-08316510ed19 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.772788] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961926, 'name': ReconfigVM_Task, 'duration_secs': 0.755099} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.773320] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b/2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1077.774098] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8125e6c7-5fe8-4b0e-9421-6748a2409232 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1077.780545] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1077.780545] env[68217]: value = "task-2961931" [ 1077.780545] env[68217]: _type = "Task" [ 1077.780545] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1077.788282] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961931, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1077.815184] env[68217]: DEBUG nova.compute.manager [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1077.882626] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.883127] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.883414] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.885218] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.885218] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.885218] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.885218] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.885218] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68217) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1077.885218] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1077.919249] env[68217]: DEBUG oslo_vmware.api [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961930, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.134479} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1077.919249] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1077.919249] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1077.919249] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1077.919249] env[68217]: INFO nova.compute.manager [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1077.919249] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1077.919249] env[68217]: DEBUG nova.compute.manager [-] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1077.919249] env[68217]: DEBUG nova.network.neutron [-] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1077.937361] env[68217]: DEBUG oslo_concurrency.lockutils [None req-28fab1c2-1bb8-446e-969f-a7f7c0a55fe2 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "e3dfe047-7cdc-4a1d-8af3-6437b5555ac4" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.048s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1078.269377] env[68217]: DEBUG nova.network.neutron [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Successfully created port: 513d21ef-f0b3-47f7-96ae-f01c23ac3ef1 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1078.294716] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961931, 'name': Rename_Task, 'duration_secs': 0.177399} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1078.294716] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1078.294716] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-453622e4-b01d-4ecf-b4e7-1dd283023947 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.302142] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1078.302142] env[68217]: value = "task-2961932" [ 1078.302142] env[68217]: _type = "Task" [ 1078.302142] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1078.311334] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961932, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.366043] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d5eb7cb-046f-41cb-a408-c1af38b68fe8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.372493] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d72d91c1-ad60-4280-8aed-e8cc7acecba0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.408177] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.412079] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68cb746e-557d-4caa-905a-f5a1a82a0c46 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.423600] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b285c648-7e24-4c12-8203-99e0c71f1e55 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.437880] env[68217]: DEBUG nova.compute.provider_tree [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1078.680910] env[68217]: DEBUG nova.network.neutron [None req-4154bf2d-8f20-4cbb-85b9-c12fa089030a tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] 
[instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating instance_info_cache with network_info: [{"id": "7ca41605-8ab9-4d01-835b-70d47e78fce9", "address": "fa:16:3e:26:5b:d2", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ca41605-8a", "ovs_interfaceid": "7ca41605-8ab9-4d01-835b-70d47e78fce9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1078.816131] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961932, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1078.831032] env[68217]: DEBUG nova.compute.manager [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1078.876211] env[68217]: DEBUG nova.virt.hardware [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1078.876211] env[68217]: DEBUG nova.virt.hardware [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1078.876211] env[68217]: DEBUG nova.virt.hardware [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1078.876211] env[68217]: DEBUG nova.virt.hardware [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1078.876211] env[68217]: DEBUG nova.virt.hardware [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1078.876211] env[68217]: DEBUG nova.virt.hardware [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1078.876211] env[68217]: DEBUG nova.virt.hardware [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1078.876545] env[68217]: DEBUG nova.virt.hardware [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1078.876864] 
env[68217]: DEBUG nova.virt.hardware [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1078.877219] env[68217]: DEBUG nova.virt.hardware [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1078.877542] env[68217]: DEBUG nova.virt.hardware [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1078.878560] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3796a525-b352-4c23-9fbc-cb8bdc544056 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.886484] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5ed9e90-f4bb-467e-a9a8-ef6f7c8166a7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1078.943024] env[68217]: DEBUG nova.scheduler.client.report [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1078.958836] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "400058d8-f9ca-41b9-a671-b04b0511d074" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1078.960617] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "400058d8-f9ca-41b9-a671-b04b0511d074" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.087111] env[68217]: DEBUG nova.compute.manager [req-35df1bb7-3f5a-4eab-93be-8cafab27e126 req-7d09b3e8-9ce0-4aa4-9bb7-169ec4d579ee service nova] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Received event network-vif-deleted-0ba43f81-704a-45f6-b856-293799e1bccc 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1079.087111] env[68217]: INFO nova.compute.manager [req-35df1bb7-3f5a-4eab-93be-8cafab27e126 req-7d09b3e8-9ce0-4aa4-9bb7-169ec4d579ee service nova] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Neutron deleted interface 0ba43f81-704a-45f6-b856-293799e1bccc; detaching it from the instance and deleting it from the info cache [ 1079.087111] env[68217]: DEBUG nova.network.neutron [req-35df1bb7-3f5a-4eab-93be-8cafab27e126 req-7d09b3e8-9ce0-4aa4-9bb7-169ec4d579ee service nova] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.185629] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4154bf2d-8f20-4cbb-85b9-c12fa089030a tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1079.313860] env[68217]: DEBUG oslo_vmware.api [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2961932, 'name': PowerOnVM_Task, 'duration_secs': 0.538914} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1079.314109] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1079.314292] env[68217]: INFO nova.compute.manager [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Took 9.03 seconds to spawn the instance on the hypervisor. [ 1079.314470] env[68217]: DEBUG nova.compute.manager [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1079.315406] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846ba58b-0626-446a-a2de-c3d8aea09e89 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.447802] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.363s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.448382] env[68217]: DEBUG nova.compute.manager [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1079.451444] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.332s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.452941] env[68217]: INFO nova.compute.claims [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1079.462224] env[68217]: DEBUG nova.compute.manager [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1079.570466] env[68217]: DEBUG nova.network.neutron [-] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1079.589430] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eac47071-eca1-4595-845b-4f16dda7c595 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.599168] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f3ab6d-15d0-49e5-92b3-775d226c9028 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1079.629281] env[68217]: DEBUG nova.compute.manager [req-35df1bb7-3f5a-4eab-93be-8cafab27e126 req-7d09b3e8-9ce0-4aa4-9bb7-169ec4d579ee service nova] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Detach interface failed, port_id=0ba43f81-704a-45f6-b856-293799e1bccc, reason: Instance 1faf45fb-a3b0-4647-b63d-3f51695b6171 could not be found. 
{{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1079.649068] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "e3dfe047-7cdc-4a1d-8af3-6437b5555ac4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.649068] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "e3dfe047-7cdc-4a1d-8af3-6437b5555ac4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.649068] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "e3dfe047-7cdc-4a1d-8af3-6437b5555ac4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1079.649322] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "e3dfe047-7cdc-4a1d-8af3-6437b5555ac4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1079.649503] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "e3dfe047-7cdc-4a1d-8af3-6437b5555ac4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1079.652026] env[68217]: INFO nova.compute.manager [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Terminating instance [ 1079.834071] env[68217]: INFO nova.compute.manager [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Took 16.10 seconds to build instance. [ 1079.953200] env[68217]: DEBUG nova.compute.utils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1079.954709] env[68217]: DEBUG nova.compute.manager [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1079.954855] env[68217]: DEBUG nova.network.neutron [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1079.984223] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.000928] env[68217]: DEBUG nova.policy [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5bb6b97339c44b8ca35e57e67e1462d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fbbb8a9e27bf4e00ac2a97750661ddbb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1080.073132] env[68217]: INFO nova.compute.manager [-] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Took 2.16 seconds to deallocate network for instance. [ 1080.155432] env[68217]: DEBUG nova.compute.manager [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1080.155658] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1080.156580] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-739775c2-9149-4540-8e66-eb4ae5f1dd33 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.165942] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1080.166217] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2bd0ae09-878e-4cd0-9a6c-2b3122fe8353 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.189445] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4154bf2d-8f20-4cbb-85b9-c12fa089030a tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1080.189723] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7782fa30-9fad-4853-a06a-15a0fbf6a1e1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.196691] env[68217]: DEBUG oslo_vmware.api [None req-4154bf2d-8f20-4cbb-85b9-c12fa089030a tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1080.196691] env[68217]: value = "task-2961934" [ 1080.196691] env[68217]: _type = "Task" [ 1080.196691] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.204763] env[68217]: DEBUG oslo_vmware.api [None req-4154bf2d-8f20-4cbb-85b9-c12fa089030a tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2961934, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.241130] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1080.241457] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1080.241730] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleting the datastore file [datastore2] e3dfe047-7cdc-4a1d-8af3-6437b5555ac4 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1080.242552] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f5a506a-d435-468e-ab4e-8ee116dafc6e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.252026] env[68217]: DEBUG oslo_vmware.api [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1080.252026] env[68217]: value = "task-2961935" [ 1080.252026] env[68217]: _type = "Task" [ 1080.252026] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1080.261768] env[68217]: DEBUG oslo_vmware.api [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961935, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1080.337676] env[68217]: DEBUG oslo_concurrency.lockutils [None req-52c6cdb1-c5c1-43e4-a208-b10fc2d7bf90 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 17.614s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1080.458529] env[68217]: DEBUG nova.compute.manager [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1080.509224] env[68217]: DEBUG nova.network.neutron [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Successfully created port: fb83e739-088d-4c2b-a569-56a62ab234aa {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1080.538599] env[68217]: DEBUG nova.network.neutron [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Successfully updated port: 513d21ef-f0b3-47f7-96ae-f01c23ac3ef1 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1080.581252] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1080.703581] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27f58e4-1105-4146-ac7d-44274d559124 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.710866] env[68217]: DEBUG oslo_vmware.api [None req-4154bf2d-8f20-4cbb-85b9-c12fa089030a tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2961934, 'name': PowerOnVM_Task, 'duration_secs': 0.391424} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.711332] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4154bf2d-8f20-4cbb-85b9-c12fa089030a tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1080.711545] env[68217]: DEBUG nova.compute.manager [None req-4154bf2d-8f20-4cbb-85b9-c12fa089030a tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1080.713120] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da99ae5-e7ba-447e-9062-630006626cb4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.718916] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f10dec2f-f76c-42df-9ead-9aa775bdbfc2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.758604] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c85a592b-2906-4e54-a1d6-12cf2bef4db6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.769857] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3453bb70-2fa6-4a1f-9edd-6948e3e51922 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1080.773442] env[68217]: DEBUG oslo_vmware.api [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961935, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.136177} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1080.774233] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1080.774454] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1080.774638] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1080.774807] env[68217]: INFO nova.compute.manager [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Took 0.62 seconds to destroy the instance on the hypervisor. [ 1080.775054] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1080.775821] env[68217]: DEBUG nova.compute.manager [-] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1080.775821] env[68217]: DEBUG nova.network.neutron [-] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1080.785614] env[68217]: DEBUG nova.compute.provider_tree [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1081.044625] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.046242] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.046242] env[68217]: DEBUG nova.network.neutron [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 
tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1081.131308] env[68217]: DEBUG nova.compute.manager [req-df522406-15d3-4a4a-83d8-95ad5d1e5a20 req-b0cd6dd3-c5c7-4afc-90e3-c5816b88b7bc service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Received event network-vif-plugged-513d21ef-f0b3-47f7-96ae-f01c23ac3ef1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1081.131446] env[68217]: DEBUG oslo_concurrency.lockutils [req-df522406-15d3-4a4a-83d8-95ad5d1e5a20 req-b0cd6dd3-c5c7-4afc-90e3-c5816b88b7bc service nova] Acquiring lock "89136574-575c-47da-928c-bd7a5dbb3a98-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.131636] env[68217]: DEBUG oslo_concurrency.lockutils [req-df522406-15d3-4a4a-83d8-95ad5d1e5a20 req-b0cd6dd3-c5c7-4afc-90e3-c5816b88b7bc service nova] Lock "89136574-575c-47da-928c-bd7a5dbb3a98-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.131835] env[68217]: DEBUG oslo_concurrency.lockutils [req-df522406-15d3-4a4a-83d8-95ad5d1e5a20 req-b0cd6dd3-c5c7-4afc-90e3-c5816b88b7bc service nova] Lock "89136574-575c-47da-928c-bd7a5dbb3a98-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.132018] env[68217]: DEBUG nova.compute.manager [req-df522406-15d3-4a4a-83d8-95ad5d1e5a20 req-b0cd6dd3-c5c7-4afc-90e3-c5816b88b7bc service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] No waiting events found dispatching network-vif-plugged-513d21ef-f0b3-47f7-96ae-f01c23ac3ef1 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1081.132243] env[68217]: WARNING nova.compute.manager [req-df522406-15d3-4a4a-83d8-95ad5d1e5a20 req-b0cd6dd3-c5c7-4afc-90e3-c5816b88b7bc service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Received unexpected event network-vif-plugged-513d21ef-f0b3-47f7-96ae-f01c23ac3ef1 for instance with vm_state building and task_state spawning. [ 1081.132389] env[68217]: DEBUG nova.compute.manager [req-df522406-15d3-4a4a-83d8-95ad5d1e5a20 req-b0cd6dd3-c5c7-4afc-90e3-c5816b88b7bc service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Received event network-changed-513d21ef-f0b3-47f7-96ae-f01c23ac3ef1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1081.132545] env[68217]: DEBUG nova.compute.manager [req-df522406-15d3-4a4a-83d8-95ad5d1e5a20 req-b0cd6dd3-c5c7-4afc-90e3-c5816b88b7bc service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Refreshing instance network info cache due to event network-changed-513d21ef-f0b3-47f7-96ae-f01c23ac3ef1. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1081.132710] env[68217]: DEBUG oslo_concurrency.lockutils [req-df522406-15d3-4a4a-83d8-95ad5d1e5a20 req-b0cd6dd3-c5c7-4afc-90e3-c5816b88b7bc service nova] Acquiring lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.186228] env[68217]: DEBUG nova.compute.manager [req-905456e8-6706-48b6-8de8-bca7495f3266 req-13bf93f9-f7a7-4f2a-b723-1d6424892790 service nova] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Received event network-changed-9414aa68-f0b4-452d-a959-86e8f0acf53e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1081.186372] env[68217]: DEBUG nova.compute.manager [req-905456e8-6706-48b6-8de8-bca7495f3266 req-13bf93f9-f7a7-4f2a-b723-1d6424892790 service nova] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Refreshing instance network info cache due to event network-changed-9414aa68-f0b4-452d-a959-86e8f0acf53e. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1081.186562] env[68217]: DEBUG oslo_concurrency.lockutils [req-905456e8-6706-48b6-8de8-bca7495f3266 req-13bf93f9-f7a7-4f2a-b723-1d6424892790 service nova] Acquiring lock "refresh_cache-2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1081.186705] env[68217]: DEBUG oslo_concurrency.lockutils [req-905456e8-6706-48b6-8de8-bca7495f3266 req-13bf93f9-f7a7-4f2a-b723-1d6424892790 service nova] Acquired lock "refresh_cache-2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1081.186863] env[68217]: DEBUG nova.network.neutron [req-905456e8-6706-48b6-8de8-bca7495f3266 req-13bf93f9-f7a7-4f2a-b723-1d6424892790 service nova] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Refreshing network info cache for port 9414aa68-f0b4-452d-a959-86e8f0acf53e {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1081.289132] env[68217]: DEBUG nova.scheduler.client.report [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1081.473501] env[68217]: DEBUG nova.compute.manager [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1081.499676] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1081.499956] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1081.500171] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1081.500396] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1081.500564] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1081.500744] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1081.500987] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1081.501202] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1081.501462] env[68217]: DEBUG 
nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1081.501618] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1081.501820] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1081.502811] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c9648d8-0e37-4d8e-81fa-8dfe5cb80b41 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.511141] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8154e0b0-5bbd-4360-ab40-a614826b0612 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.580070] env[68217]: DEBUG nova.network.neutron [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1081.667414] env[68217]: DEBUG nova.network.neutron [-] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.753354] env[68217]: DEBUG nova.network.neutron [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Updating instance_info_cache with network_info: [{"id": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "address": "fa:16:3e:41:6d:56", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap513d21ef-f0", "ovs_interfaceid": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1081.793754] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.342s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.794223] env[68217]: DEBUG nova.compute.manager [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1081.796678] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 3.389s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.796845] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1081.796994] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68217) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1081.797315] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.813s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1081.798779] env[68217]: INFO nova.compute.claims [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1081.805058] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98bf4836-740a-4ad7-a06d-863c30c4ae2e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.810831] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70b9f1dd-db41-4853-a74b-682fc55d7301 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.828866] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e617a86-14c9-4f4c-841a-658d80d08809 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.837134] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d41d32e7-e34b-4775-8d33-5e02bcae6e5a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.872449] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179484MB free_disk=153GB free_vcpus=48 pci_devices=None {{(pid=68217) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1081.872606] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
{{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1081.956470] env[68217]: DEBUG nova.network.neutron [req-905456e8-6706-48b6-8de8-bca7495f3266 req-13bf93f9-f7a7-4f2a-b723-1d6424892790 service nova] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Updated VIF entry in instance network info cache for port 9414aa68-f0b4-452d-a959-86e8f0acf53e. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1081.956823] env[68217]: DEBUG nova.network.neutron [req-905456e8-6706-48b6-8de8-bca7495f3266 req-13bf93f9-f7a7-4f2a-b723-1d6424892790 service nova] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Updating instance_info_cache with network_info: [{"id": "9414aa68-f0b4-452d-a959-86e8f0acf53e", "address": "fa:16:3e:12:ac:63", "network": {"id": "0e4ebc2e-6e2d-4414-a560-9db08d15dabf", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1709597117-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.233", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "904fd1b1eb9d4ab8bd1ea9967249bc29", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9414aa68-f0", "ovs_interfaceid": "9414aa68-f0b4-452d-a959-86e8f0acf53e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.173748] env[68217]: INFO nova.compute.manager [-] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Took 1.40 seconds to deallocate network for instance. 
[ 1082.194113] env[68217]: DEBUG nova.network.neutron [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Successfully updated port: fb83e739-088d-4c2b-a569-56a62ab234aa {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1082.255054] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.255281] env[68217]: DEBUG nova.compute.manager [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Instance network_info: |[{"id": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "address": "fa:16:3e:41:6d:56", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap513d21ef-f0", "ovs_interfaceid": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1082.255558] env[68217]: DEBUG oslo_concurrency.lockutils [req-df522406-15d3-4a4a-83d8-95ad5d1e5a20 req-b0cd6dd3-c5c7-4afc-90e3-c5816b88b7bc service nova] Acquired lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.255736] env[68217]: DEBUG nova.network.neutron [req-df522406-15d3-4a4a-83d8-95ad5d1e5a20 req-b0cd6dd3-c5c7-4afc-90e3-c5816b88b7bc service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Refreshing network info cache for port 513d21ef-f0b3-47f7-96ae-f01c23ac3ef1 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1082.256875] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:41:6d:56', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78340140-126f-4ef8-a340-debaa64da3e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'513d21ef-f0b3-47f7-96ae-f01c23ac3ef1', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1082.264726] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1082.267648] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1082.268139] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5d9483c1-11d6-49f3-bc21-ca4a41ef5210 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1082.291605] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1082.291605] env[68217]: value = "task-2961936" [ 1082.291605] env[68217]: _type = "Task" [ 1082.291605] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1082.299607] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961936, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.302893] env[68217]: DEBUG nova.compute.utils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1082.304423] env[68217]: DEBUG nova.compute.manager [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1082.304639] env[68217]: DEBUG nova.network.neutron [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1082.377384] env[68217]: DEBUG nova.policy [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5bb6b97339c44b8ca35e57e67e1462d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fbbb8a9e27bf4e00ac2a97750661ddbb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1082.460319] env[68217]: DEBUG oslo_concurrency.lockutils [req-905456e8-6706-48b6-8de8-bca7495f3266 req-13bf93f9-f7a7-4f2a-b723-1d6424892790 service nova] Releasing lock "refresh_cache-2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1082.607550] env[68217]: DEBUG nova.network.neutron [req-df522406-15d3-4a4a-83d8-95ad5d1e5a20 req-b0cd6dd3-c5c7-4afc-90e3-c5816b88b7bc service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Updated VIF entry in instance network info cache for port 513d21ef-f0b3-47f7-96ae-f01c23ac3ef1. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1082.607945] env[68217]: DEBUG nova.network.neutron [req-df522406-15d3-4a4a-83d8-95ad5d1e5a20 req-b0cd6dd3-c5c7-4afc-90e3-c5816b88b7bc service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Updating instance_info_cache with network_info: [{"id": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "address": "fa:16:3e:41:6d:56", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap513d21ef-f0", "ovs_interfaceid": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1082.680721] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1082.699697] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "refresh_cache-8f0d5766-005e-459d-b9f4-e46b435e43b8" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1082.699697] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquired lock "refresh_cache-8f0d5766-005e-459d-b9f4-e46b435e43b8" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1082.699697] env[68217]: DEBUG nova.network.neutron [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1082.763730] env[68217]: DEBUG nova.network.neutron [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Successfully created port: 3c55e109-8e58-427e-97e6-51b5fe90081f {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1082.801545] 
env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961936, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1082.807960] env[68217]: DEBUG nova.compute.manager [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1083.045781] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8da22104-09bb-45f4-a502-24afcd63af17 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.053464] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd8a82dd-dc0b-407b-9511-dcbe8ff7d215 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.083246] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5273fea8-1dcb-4179-8baa-10e44e00c3b3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.090490] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de16d37a-6806-42b8-9560-2fa26b828bd9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.104858] env[68217]: DEBUG nova.compute.provider_tree [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1083.112408] env[68217]: DEBUG oslo_concurrency.lockutils [req-df522406-15d3-4a4a-83d8-95ad5d1e5a20 req-b0cd6dd3-c5c7-4afc-90e3-c5816b88b7bc service nova] Releasing lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.233890] env[68217]: DEBUG nova.compute.manager [req-d24832e6-2701-45c3-8a0c-776a6a4a4115 req-b34f0523-5b19-411a-bdca-7d3596cdb1cd service nova] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Received event network-vif-deleted-1916eda4-ae94-4d60-800d-2f94dc473c1e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1083.234190] env[68217]: DEBUG nova.compute.manager [req-d24832e6-2701-45c3-8a0c-776a6a4a4115 req-b34f0523-5b19-411a-bdca-7d3596cdb1cd service nova] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Received event network-vif-plugged-fb83e739-088d-4c2b-a569-56a62ab234aa {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1083.234328] env[68217]: DEBUG oslo_concurrency.lockutils [req-d24832e6-2701-45c3-8a0c-776a6a4a4115 req-b34f0523-5b19-411a-bdca-7d3596cdb1cd service nova] Acquiring lock "8f0d5766-005e-459d-b9f4-e46b435e43b8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1083.234530] env[68217]: DEBUG 
oslo_concurrency.lockutils [req-d24832e6-2701-45c3-8a0c-776a6a4a4115 req-b34f0523-5b19-411a-bdca-7d3596cdb1cd service nova] Lock "8f0d5766-005e-459d-b9f4-e46b435e43b8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1083.234696] env[68217]: DEBUG oslo_concurrency.lockutils [req-d24832e6-2701-45c3-8a0c-776a6a4a4115 req-b34f0523-5b19-411a-bdca-7d3596cdb1cd service nova] Lock "8f0d5766-005e-459d-b9f4-e46b435e43b8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1083.234873] env[68217]: DEBUG nova.compute.manager [req-d24832e6-2701-45c3-8a0c-776a6a4a4115 req-b34f0523-5b19-411a-bdca-7d3596cdb1cd service nova] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] No waiting events found dispatching network-vif-plugged-fb83e739-088d-4c2b-a569-56a62ab234aa {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1083.235067] env[68217]: WARNING nova.compute.manager [req-d24832e6-2701-45c3-8a0c-776a6a4a4115 req-b34f0523-5b19-411a-bdca-7d3596cdb1cd service nova] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Received unexpected event network-vif-plugged-fb83e739-088d-4c2b-a569-56a62ab234aa for instance with vm_state building and task_state spawning. [ 1083.235285] env[68217]: DEBUG nova.compute.manager [req-d24832e6-2701-45c3-8a0c-776a6a4a4115 req-b34f0523-5b19-411a-bdca-7d3596cdb1cd service nova] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Received event network-changed-fb83e739-088d-4c2b-a569-56a62ab234aa {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1083.235452] env[68217]: DEBUG nova.compute.manager [req-d24832e6-2701-45c3-8a0c-776a6a4a4115 req-b34f0523-5b19-411a-bdca-7d3596cdb1cd service nova] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Refreshing instance network info cache due to event network-changed-fb83e739-088d-4c2b-a569-56a62ab234aa. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1083.235632] env[68217]: DEBUG oslo_concurrency.lockutils [req-d24832e6-2701-45c3-8a0c-776a6a4a4115 req-b34f0523-5b19-411a-bdca-7d3596cdb1cd service nova] Acquiring lock "refresh_cache-8f0d5766-005e-459d-b9f4-e46b435e43b8" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.244065] env[68217]: DEBUG nova.network.neutron [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1083.301448] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961936, 'name': CreateVM_Task, 'duration_secs': 0.574601} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.301620] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1083.302295] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.302456] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.302763] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1083.303029] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6876b252-957a-400e-88e6-7e02d0b109c9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.307527] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1083.307527] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5268ee5a-50ce-34b0-d602-3702efb28d45" [ 1083.307527] env[68217]: _type = "Task" [ 1083.307527] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.320360] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5268ee5a-50ce-34b0-d602-3702efb28d45, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.434316] env[68217]: DEBUG nova.network.neutron [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Updating instance_info_cache with network_info: [{"id": "fb83e739-088d-4c2b-a569-56a62ab234aa", "address": "fa:16:3e:fb:93:89", "network": {"id": "1d50c0a9-6adb-41a4-aaba-7f1b5d8738a0", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-222715654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fbbb8a9e27bf4e00ac2a97750661ddbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb83e739-08", "ovs_interfaceid": "fb83e739-088d-4c2b-a569-56a62ab234aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1083.608815] env[68217]: DEBUG nova.scheduler.client.report [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1083.818318] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5268ee5a-50ce-34b0-d602-3702efb28d45, 'name': SearchDatastore_Task, 'duration_secs': 0.012564} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1083.818659] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.818839] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1083.819085] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1083.819236] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.819505] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1083.819768] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6dacbd04-270e-4cb1-87df-1406f0c22d3d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.822307] env[68217]: DEBUG nova.compute.manager [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1083.832083] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1083.832294] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1083.833056] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cc188926-6005-42be-9662-d636a19fcdb0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.838528] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1083.838528] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c13519-7234-80db-c83f-02b23eec97af" [ 1083.838528] env[68217]: _type = "Task" [ 1083.838528] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.847336] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c13519-7234-80db-c83f-02b23eec97af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1083.849322] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1083.849609] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1083.849725] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1083.849908] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1083.850072] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] 
Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1083.850204] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1083.850407] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1083.850566] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1083.850730] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1083.850900] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1083.851063] env[68217]: DEBUG nova.virt.hardware [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1083.851774] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9106af5-e56a-40a7-abb2-6c8ddfe45ea3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.858874] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9938133e-2c59-48d0-84ae-69b9be8aa597 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.936791] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Releasing lock "refresh_cache-8f0d5766-005e-459d-b9f4-e46b435e43b8" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1083.937226] env[68217]: DEBUG nova.compute.manager [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Instance network_info: |[{"id": "fb83e739-088d-4c2b-a569-56a62ab234aa", "address": "fa:16:3e:fb:93:89", "network": {"id": 
"1d50c0a9-6adb-41a4-aaba-7f1b5d8738a0", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-222715654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fbbb8a9e27bf4e00ac2a97750661ddbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb83e739-08", "ovs_interfaceid": "fb83e739-088d-4c2b-a569-56a62ab234aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1083.937545] env[68217]: DEBUG oslo_concurrency.lockutils [req-d24832e6-2701-45c3-8a0c-776a6a4a4115 req-b34f0523-5b19-411a-bdca-7d3596cdb1cd service nova] Acquired lock "refresh_cache-8f0d5766-005e-459d-b9f4-e46b435e43b8" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1083.937727] env[68217]: DEBUG nova.network.neutron [req-d24832e6-2701-45c3-8a0c-776a6a4a4115 req-b34f0523-5b19-411a-bdca-7d3596cdb1cd service nova] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Refreshing network info cache for port fb83e739-088d-4c2b-a569-56a62ab234aa {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1083.939211] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fb:93:89', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '93341b73-918c-4e9d-9c66-ca171a54b574', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'fb83e739-088d-4c2b-a569-56a62ab234aa', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1083.947392] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Creating folder: Project (fbbb8a9e27bf4e00ac2a97750661ddbb). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1083.947765] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-45c504dd-e037-48a6-b535-27637fe8888a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.958606] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Created folder: Project (fbbb8a9e27bf4e00ac2a97750661ddbb) in parent group-v594094. 
[ 1083.958778] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Creating folder: Instances. Parent ref: group-v594383. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1083.959015] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8392f2a7-b136-485d-acdc-a878a3f2ffdc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.970120] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Created folder: Instances in parent group-v594383. [ 1083.970342] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1083.970525] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1083.970715] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-748665e4-504a-4393-9c2f-74f125c1ff25 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1083.989392] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1083.989392] env[68217]: value = "task-2961939" [ 1083.989392] env[68217]: _type = "Task" [ 1083.989392] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1083.996525] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961939, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.114709] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.317s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1084.115292] env[68217]: DEBUG nova.compute.manager [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1084.118132] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.537s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1084.118359] env[68217]: DEBUG nova.objects.instance [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lazy-loading 'resources' on Instance uuid 1faf45fb-a3b0-4647-b63d-3f51695b6171 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1084.348609] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c13519-7234-80db-c83f-02b23eec97af, 'name': SearchDatastore_Task, 'duration_secs': 0.026135} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.349433] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d89d2af-cf33-47ea-86a9-a5a97b0b56e4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.354721] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1084.354721] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52dc9b2f-25cf-d3f9-d0f7-6bf0f2217267" [ 1084.354721] env[68217]: _type = "Task" [ 1084.354721] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.361888] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52dc9b2f-25cf-d3f9-d0f7-6bf0f2217267, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.499687] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961939, 'name': CreateVM_Task, 'duration_secs': 0.367864} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.499959] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1084.500695] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1084.500911] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1084.501308] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1084.503634] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-861b6ea0-0b49-47be-8953-657acdf4ac09 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.508191] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1084.508191] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524fdb9e-6b99-8088-9da9-36905efd7420" [ 1084.508191] env[68217]: _type = "Task" [ 1084.508191] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.516395] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524fdb9e-6b99-8088-9da9-36905efd7420, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.622396] env[68217]: DEBUG nova.compute.utils [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1084.626065] env[68217]: DEBUG nova.compute.manager [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1084.626244] env[68217]: DEBUG nova.network.neutron [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1084.698834] env[68217]: DEBUG nova.network.neutron [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Successfully updated port: 3c55e109-8e58-427e-97e6-51b5fe90081f {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1084.718169] env[68217]: DEBUG nova.policy [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34286c10b8b242fb83eb4f1493b9477b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '90ad2b0a8a0743ca80a0685bf56e0446', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1084.815500] env[68217]: DEBUG nova.network.neutron [req-d24832e6-2701-45c3-8a0c-776a6a4a4115 req-b34f0523-5b19-411a-bdca-7d3596cdb1cd service nova] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Updated VIF entry in instance network info cache for port fb83e739-088d-4c2b-a569-56a62ab234aa. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1084.815846] env[68217]: DEBUG nova.network.neutron [req-d24832e6-2701-45c3-8a0c-776a6a4a4115 req-b34f0523-5b19-411a-bdca-7d3596cdb1cd service nova] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Updating instance_info_cache with network_info: [{"id": "fb83e739-088d-4c2b-a569-56a62ab234aa", "address": "fa:16:3e:fb:93:89", "network": {"id": "1d50c0a9-6adb-41a4-aaba-7f1b5d8738a0", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-222715654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fbbb8a9e27bf4e00ac2a97750661ddbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapfb83e739-08", "ovs_interfaceid": "fb83e739-088d-4c2b-a569-56a62ab234aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1084.853840] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e95fcd76-55db-4f3b-820a-dc70a6168a23 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.865818] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52dc9b2f-25cf-d3f9-d0f7-6bf0f2217267, 'name': SearchDatastore_Task, 'duration_secs': 0.00869} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1084.867395] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1084.867655] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 89136574-575c-47da-928c-bd7a5dbb3a98/89136574-575c-47da-928c-bd7a5dbb3a98.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1084.867934] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-836951f1-3f22-4bb1-a417-22cc6e41bd61 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.870811] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3fa42839-10ff-4579-80bc-4e185019e0ec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.903467] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a1aecda-2e1a-4829-a670-f84ec3cab4bb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.906052] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1084.906052] env[68217]: value = "task-2961940" [ 1084.906052] env[68217]: _type = "Task" [ 1084.906052] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1084.912616] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3dd9358-2698-4b9c-85f2-2172c2186c4a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.919199] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961940, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1084.930209] env[68217]: DEBUG nova.compute.provider_tree [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1085.019585] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524fdb9e-6b99-8088-9da9-36905efd7420, 'name': SearchDatastore_Task, 'duration_secs': 0.008795} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.019910] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.020161] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1085.020396] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.020539] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1085.020718] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1085.020979] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8ba704f1-921e-4d40-89e6-f77765442fda {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.029099] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) 
mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1085.029285] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1085.029986] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8576f9f0-18bf-4cda-9d4c-18bdea466fe1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.035047] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1085.035047] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52121e9b-1889-b245-e87b-75d0c20a9882" [ 1085.035047] env[68217]: _type = "Task" [ 1085.035047] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.044548] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52121e9b-1889-b245-e87b-75d0c20a9882, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.062657] env[68217]: DEBUG nova.network.neutron [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Successfully created port: 189406da-f39b-4370-b43d-945cbb45afb2 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1085.127481] env[68217]: DEBUG nova.compute.manager [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1085.201894] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "refresh_cache-38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.202174] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquired lock "refresh_cache-38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1085.202232] env[68217]: DEBUG nova.network.neutron [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1085.265181] env[68217]: DEBUG nova.compute.manager [req-ee09c778-f613-4546-a80b-a9a245f8de0e req-f2d60178-469f-4d74-8aee-de81811df384 service nova] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Received event network-vif-plugged-3c55e109-8e58-427e-97e6-51b5fe90081f {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1085.265277] env[68217]: DEBUG oslo_concurrency.lockutils [req-ee09c778-f613-4546-a80b-a9a245f8de0e req-f2d60178-469f-4d74-8aee-de81811df384 service nova] Acquiring lock "38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1085.265422] env[68217]: DEBUG oslo_concurrency.lockutils [req-ee09c778-f613-4546-a80b-a9a245f8de0e req-f2d60178-469f-4d74-8aee-de81811df384 service nova] Lock "38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.265631] env[68217]: DEBUG oslo_concurrency.lockutils [req-ee09c778-f613-4546-a80b-a9a245f8de0e req-f2d60178-469f-4d74-8aee-de81811df384 service nova] Lock "38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.265887] env[68217]: DEBUG nova.compute.manager [req-ee09c778-f613-4546-a80b-a9a245f8de0e req-f2d60178-469f-4d74-8aee-de81811df384 service nova] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] No waiting events found dispatching network-vif-plugged-3c55e109-8e58-427e-97e6-51b5fe90081f {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1085.266176] env[68217]: WARNING nova.compute.manager [req-ee09c778-f613-4546-a80b-a9a245f8de0e req-f2d60178-469f-4d74-8aee-de81811df384 service nova] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Received unexpected event network-vif-plugged-3c55e109-8e58-427e-97e6-51b5fe90081f for instance with vm_state building and task_state spawning. 
[ 1085.266333] env[68217]: DEBUG nova.compute.manager [req-ee09c778-f613-4546-a80b-a9a245f8de0e req-f2d60178-469f-4d74-8aee-de81811df384 service nova] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Received event network-changed-3c55e109-8e58-427e-97e6-51b5fe90081f {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1085.266451] env[68217]: DEBUG nova.compute.manager [req-ee09c778-f613-4546-a80b-a9a245f8de0e req-f2d60178-469f-4d74-8aee-de81811df384 service nova] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Refreshing instance network info cache due to event network-changed-3c55e109-8e58-427e-97e6-51b5fe90081f. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1085.266690] env[68217]: DEBUG oslo_concurrency.lockutils [req-ee09c778-f613-4546-a80b-a9a245f8de0e req-f2d60178-469f-4d74-8aee-de81811df384 service nova] Acquiring lock "refresh_cache-38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.318642] env[68217]: DEBUG oslo_concurrency.lockutils [req-d24832e6-2701-45c3-8a0c-776a6a4a4115 req-b34f0523-5b19-411a-bdca-7d3596cdb1cd service nova] Releasing lock "refresh_cache-8f0d5766-005e-459d-b9f4-e46b435e43b8" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1085.416306] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961940, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.511109} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.416618] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 89136574-575c-47da-928c-bd7a5dbb3a98/89136574-575c-47da-928c-bd7a5dbb3a98.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1085.416772] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1085.417077] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d177e624-9957-4239-8759-ae967a04938a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.422900] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1085.422900] env[68217]: value = "task-2961941" [ 1085.422900] env[68217]: _type = "Task" [ 1085.422900] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.430661] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961941, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.432625] env[68217]: DEBUG nova.scheduler.client.report [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1085.545517] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52121e9b-1889-b245-e87b-75d0c20a9882, 'name': SearchDatastore_Task, 'duration_secs': 0.008795} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.546271] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7e34f45d-a223-4467-98f3-194a0241227e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.550953] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1085.550953] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e2836a-7ff0-08a4-2781-b1dccbc7616d" [ 1085.550953] env[68217]: _type = "Task" [ 1085.550953] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.558177] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e2836a-7ff0-08a4-2781-b1dccbc7616d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.733028] env[68217]: DEBUG nova.network.neutron [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1085.870917] env[68217]: DEBUG nova.network.neutron [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Updating instance_info_cache with network_info: [{"id": "3c55e109-8e58-427e-97e6-51b5fe90081f", "address": "fa:16:3e:7a:fa:8b", "network": {"id": "1d50c0a9-6adb-41a4-aaba-7f1b5d8738a0", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-222715654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fbbb8a9e27bf4e00ac2a97750661ddbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c55e109-8e", "ovs_interfaceid": "3c55e109-8e58-427e-97e6-51b5fe90081f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.934178] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961941, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066329} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.934462] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1085.935283] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-814bfd93-19cf-40d5-8652-e9c61e0b698d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.938435] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.820s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1085.940524] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 4.068s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1085.966180] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Reconfiguring VM instance instance-0000006c to attach disk [datastore1] 89136574-575c-47da-928c-bd7a5dbb3a98/89136574-575c-47da-928c-bd7a5dbb3a98.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1085.967371] env[68217]: INFO nova.scheduler.client.report [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Deleted allocations for instance 1faf45fb-a3b0-4647-b63d-3f51695b6171 [ 1085.968561] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4dab2e54-604d-47df-b573-f4ab667f144e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.991390] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1085.991390] env[68217]: value = "task-2961942" [ 1085.991390] env[68217]: _type = "Task" [ 1085.991390] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.999909] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961942, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.060950] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e2836a-7ff0-08a4-2781-b1dccbc7616d, 'name': SearchDatastore_Task, 'duration_secs': 0.009143} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.061238] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1086.061497] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 8f0d5766-005e-459d-b9f4-e46b435e43b8/8f0d5766-005e-459d-b9f4-e46b435e43b8.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1086.061749] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7fb85171-5418-445b-9208-cdb0c5653101 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.069132] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1086.069132] env[68217]: value = "task-2961943" [ 1086.069132] env[68217]: _type = "Task" [ 1086.069132] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.077068] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961943, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.137438] env[68217]: DEBUG nova.compute.manager [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1086.181648] env[68217]: DEBUG nova.virt.hardware [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1086.181892] env[68217]: DEBUG nova.virt.hardware [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1086.182062] env[68217]: DEBUG nova.virt.hardware [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1086.182254] env[68217]: DEBUG nova.virt.hardware [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1086.182438] env[68217]: DEBUG nova.virt.hardware [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1086.182561] env[68217]: DEBUG nova.virt.hardware [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1086.182769] env[68217]: DEBUG nova.virt.hardware [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1086.182925] env[68217]: DEBUG nova.virt.hardware [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1086.183117] env[68217]: DEBUG 
nova.virt.hardware [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1086.183296] env[68217]: DEBUG nova.virt.hardware [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1086.183470] env[68217]: DEBUG nova.virt.hardware [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1086.184361] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b28c6c-8a22-4f0e-a17a-ceb9b85d9cf6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.192599] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae8c45be-c6d2-4e43-86fa-b2fd8a652150 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.373895] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Releasing lock "refresh_cache-38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1086.374217] env[68217]: DEBUG nova.compute.manager [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Instance network_info: |[{"id": "3c55e109-8e58-427e-97e6-51b5fe90081f", "address": "fa:16:3e:7a:fa:8b", "network": {"id": "1d50c0a9-6adb-41a4-aaba-7f1b5d8738a0", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-222715654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fbbb8a9e27bf4e00ac2a97750661ddbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c55e109-8e", "ovs_interfaceid": "3c55e109-8e58-427e-97e6-51b5fe90081f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1086.374544] env[68217]: DEBUG oslo_concurrency.lockutils 
[req-ee09c778-f613-4546-a80b-a9a245f8de0e req-f2d60178-469f-4d74-8aee-de81811df384 service nova] Acquired lock "refresh_cache-38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1086.374726] env[68217]: DEBUG nova.network.neutron [req-ee09c778-f613-4546-a80b-a9a245f8de0e req-f2d60178-469f-4d74-8aee-de81811df384 service nova] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Refreshing network info cache for port 3c55e109-8e58-427e-97e6-51b5fe90081f {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1086.376025] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7a:fa:8b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '93341b73-918c-4e9d-9c66-ca171a54b574', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3c55e109-8e58-427e-97e6-51b5fe90081f', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1086.384226] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1086.384703] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1086.385782] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e7595300-fb5a-4bfc-bd0b-55b16a0ca37c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.407999] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1086.407999] env[68217]: value = "task-2961944" [ 1086.407999] env[68217]: _type = "Task" [ 1086.407999] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.423332] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961944, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.490981] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ec8994c-03f5-46c6-afbf-5b5c2c666104 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "1faf45fb-a3b0-4647-b63d-3f51695b6171" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.183s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1086.508730] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961942, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.585305] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961943, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.632697] env[68217]: DEBUG nova.network.neutron [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Successfully updated port: 189406da-f39b-4370-b43d-945cbb45afb2 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1086.918194] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961944, 'name': CreateVM_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.002767] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961942, 'name': ReconfigVM_Task, 'duration_secs': 0.745789} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.003066] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Reconfigured VM instance instance-0000006c to attach disk [datastore1] 89136574-575c-47da-928c-bd7a5dbb3a98/89136574-575c-47da-928c-bd7a5dbb3a98.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1087.003758] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-96986972-c6ae-4e2c-b30e-5e44caff08ed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.008722] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 7056fb29-2a2f-4275-a411-4d5f3fcb421f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1087.008878] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance a86015ea-fa6b-4cf8-9d79-273ffa02ec23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1087.008992] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 3af571ce-c400-45a1-97ad-4fbd53395129 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1087.009214] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 776798bf-1ad4-4acb-ac58-cacc5493e1c7 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1087.009375] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance e8ed78ff-94dd-42d3-8a4d-8e58dc788e55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1087.009482] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1087.009590] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance d28bcf16-b081-4dc8-a975-2acaed222e15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1087.009663] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 6b4dff91-254e-43cc-85cf-7de6214dcafd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1087.009773] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance a4dcc7fb-83e4-4bb9-9c98-9569daee1435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1087.009878] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 815d1801-fa07-4466-850d-b1a36d630d46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1087.010121] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance e3dfe047-7cdc-4a1d-8af3-6437b5555ac4 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. 
[ 1087.010224] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1087.010388] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 89136574-575c-47da-928c-bd7a5dbb3a98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1087.010515] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 8f0d5766-005e-459d-b9f4-e46b435e43b8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1087.010629] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1087.010740] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 400058d8-f9ca-41b9-a671-b04b0511d074 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1087.010950] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Total usable vcpus: 48, total allocated vcpus: 15 {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1087.011093] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3392MB phys_disk=200GB used_disk=15GB total_vcpus=48 used_vcpus=15 pci_stats=[] {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1087.017353] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1087.017353] env[68217]: value = "task-2961945" [ 1087.017353] env[68217]: _type = "Task" [ 1087.017353] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.027691] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961945, 'name': Rename_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.082972] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961943, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.836966} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.083259] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 8f0d5766-005e-459d-b9f4-e46b435e43b8/8f0d5766-005e-459d-b9f4-e46b435e43b8.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1087.083469] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1087.083719] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-22ced427-9b8b-4fe5-80b0-fe380d0fc6b0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.090295] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1087.090295] env[68217]: value = "task-2961946" [ 1087.090295] env[68217]: _type = "Task" [ 1087.090295] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.100735] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961946, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.130029] env[68217]: DEBUG nova.network.neutron [req-ee09c778-f613-4546-a80b-a9a245f8de0e req-f2d60178-469f-4d74-8aee-de81811df384 service nova] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Updated VIF entry in instance network info cache for port 3c55e109-8e58-427e-97e6-51b5fe90081f. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1087.130317] env[68217]: DEBUG nova.network.neutron [req-ee09c778-f613-4546-a80b-a9a245f8de0e req-f2d60178-469f-4d74-8aee-de81811df384 service nova] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Updating instance_info_cache with network_info: [{"id": "3c55e109-8e58-427e-97e6-51b5fe90081f", "address": "fa:16:3e:7a:fa:8b", "network": {"id": "1d50c0a9-6adb-41a4-aaba-7f1b5d8738a0", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-222715654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fbbb8a9e27bf4e00ac2a97750661ddbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3c55e109-8e", "ovs_interfaceid": "3c55e109-8e58-427e-97e6-51b5fe90081f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.135747] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.135747] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.135910] env[68217]: DEBUG nova.network.neutron [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1087.244872] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cd5b06b-ec8a-42b8-9e50-1cc4732772ce {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.254956] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa7fa866-f111-4c83-88c0-43d201caa5f6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.291193] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ea4e03e-8449-477a-bf94-ab9a2af53c9e {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.297870] env[68217]: DEBUG nova.compute.manager [req-6be408e5-4dec-41b1-9035-2314047a8425 req-ac5f4c62-e756-4237-8fa6-6a36c1c68d8a service nova] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Received event network-vif-plugged-189406da-f39b-4370-b43d-945cbb45afb2 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1087.298117] env[68217]: DEBUG oslo_concurrency.lockutils [req-6be408e5-4dec-41b1-9035-2314047a8425 req-ac5f4c62-e756-4237-8fa6-6a36c1c68d8a service nova] Acquiring lock "400058d8-f9ca-41b9-a671-b04b0511d074-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.298370] env[68217]: DEBUG oslo_concurrency.lockutils [req-6be408e5-4dec-41b1-9035-2314047a8425 req-ac5f4c62-e756-4237-8fa6-6a36c1c68d8a service nova] Lock "400058d8-f9ca-41b9-a671-b04b0511d074-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.298489] env[68217]: DEBUG oslo_concurrency.lockutils [req-6be408e5-4dec-41b1-9035-2314047a8425 req-ac5f4c62-e756-4237-8fa6-6a36c1c68d8a service nova] Lock "400058d8-f9ca-41b9-a671-b04b0511d074-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1087.298717] env[68217]: DEBUG nova.compute.manager [req-6be408e5-4dec-41b1-9035-2314047a8425 req-ac5f4c62-e756-4237-8fa6-6a36c1c68d8a service nova] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] No waiting events found dispatching network-vif-plugged-189406da-f39b-4370-b43d-945cbb45afb2 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1087.298915] env[68217]: WARNING nova.compute.manager [req-6be408e5-4dec-41b1-9035-2314047a8425 req-ac5f4c62-e756-4237-8fa6-6a36c1c68d8a service nova] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Received unexpected event network-vif-plugged-189406da-f39b-4370-b43d-945cbb45afb2 for instance with vm_state building and task_state spawning. [ 1087.299091] env[68217]: DEBUG nova.compute.manager [req-6be408e5-4dec-41b1-9035-2314047a8425 req-ac5f4c62-e756-4237-8fa6-6a36c1c68d8a service nova] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Received event network-changed-189406da-f39b-4370-b43d-945cbb45afb2 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1087.299250] env[68217]: DEBUG nova.compute.manager [req-6be408e5-4dec-41b1-9035-2314047a8425 req-ac5f4c62-e756-4237-8fa6-6a36c1c68d8a service nova] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Refreshing instance network info cache due to event network-changed-189406da-f39b-4370-b43d-945cbb45afb2. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1087.301034] env[68217]: DEBUG oslo_concurrency.lockutils [req-6be408e5-4dec-41b1-9035-2314047a8425 req-ac5f4c62-e756-4237-8fa6-6a36c1c68d8a service nova] Acquiring lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.306145] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f285ddd-718d-42c9-9590-9f0d4b51d698 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.329503] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1087.421847] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961944, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.438649] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5e7dd77b-d076-4199-a467-62621893498f tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1087.438939] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5e7dd77b-d076-4199-a467-62621893498f tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1087.527567] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961945, 'name': Rename_Task, 'duration_secs': 0.429943} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.527873] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1087.528209] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f8c1fc1e-9776-41a6-b735-065e32c5525f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.534962] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1087.534962] env[68217]: value = "task-2961947" [ 1087.534962] env[68217]: _type = "Task" [ 1087.534962] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.542841] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961947, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.602196] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961946, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.206341} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.602503] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1087.603336] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3d89e6d-1e4b-4b74-bb49-0d9be5efaec2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.625644] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Reconfiguring VM instance instance-0000006d to attach disk [datastore1] 8f0d5766-005e-459d-b9f4-e46b435e43b8/8f0d5766-005e-459d-b9f4-e46b435e43b8.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1087.625870] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-69df967c-1853-4592-a134-b34b37f8e4b2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.642135] env[68217]: DEBUG oslo_concurrency.lockutils [req-ee09c778-f613-4546-a80b-a9a245f8de0e req-f2d60178-469f-4d74-8aee-de81811df384 service nova] Releasing lock "refresh_cache-38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1087.649169] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1087.649169] env[68217]: value = "task-2961948" [ 1087.649169] env[68217]: _type = "Task" [ 1087.649169] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.656990] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961948, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.687825] env[68217]: DEBUG nova.network.neutron [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1087.833786] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1087.879653] env[68217]: DEBUG nova.network.neutron [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating instance_info_cache with network_info: [{"id": "189406da-f39b-4370-b43d-945cbb45afb2", "address": "fa:16:3e:fa:ff:e7", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap189406da-f3", "ovs_interfaceid": "189406da-f39b-4370-b43d-945cbb45afb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.920334] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961944, 'name': CreateVM_Task, 'duration_secs': 1.018} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1087.920546] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1087.921441] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1087.921686] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1087.922097] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1087.922424] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9014f074-92f3-4be5-97c2-23b575d41e15 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.927353] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1087.927353] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524abb00-59b1-37dc-582b-2f39466579ac" [ 1087.927353] env[68217]: _type = "Task" [ 1087.927353] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.935635] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524abb00-59b1-37dc-582b-2f39466579ac, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.942546] env[68217]: INFO nova.compute.manager [None req-5e7dd77b-d076-4199-a467-62621893498f tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Detaching volume 510a8087-136f-46d8-91cf-d7ca59a54445 [ 1087.980422] env[68217]: INFO nova.virt.block_device [None req-5e7dd77b-d076-4199-a467-62621893498f tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Attempting to driver detach volume 510a8087-136f-46d8-91cf-d7ca59a54445 from mountpoint /dev/sdb [ 1087.980646] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e7dd77b-d076-4199-a467-62621893498f tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Volume detach. Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1087.980917] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e7dd77b-d076-4199-a467-62621893498f tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594367', 'volume_id': '510a8087-136f-46d8-91cf-d7ca59a54445', 'name': 'volume-510a8087-136f-46d8-91cf-d7ca59a54445', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '776798bf-1ad4-4acb-ac58-cacc5493e1c7', 'attached_at': '', 'detached_at': '', 'volume_id': '510a8087-136f-46d8-91cf-d7ca59a54445', 'serial': '510a8087-136f-46d8-91cf-d7ca59a54445'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1087.982043] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8eb26d5a-91b5-4d64-bd87-05f47fe01058 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.004336] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a23dcb0-5677-4aa5-8ddb-49d65fdd7656 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.010981] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c284c56-08b5-43f7-91bf-98b50782833e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.031331] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38e1c0ec-f876-400d-bb76-bff43c91e1cc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.048274] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e7dd77b-d076-4199-a467-62621893498f tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] The volume has not been displaced from its original location: [datastore1] volume-510a8087-136f-46d8-91cf-d7ca59a54445/volume-510a8087-136f-46d8-91cf-d7ca59a54445.vmdk. No consolidation needed. 
{{(pid=68217) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1088.053631] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e7dd77b-d076-4199-a467-62621893498f tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Reconfiguring VM instance instance-00000057 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1088.054274] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3f547903-14a3-43ae-8bd3-394fd21addd6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.071135] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961947, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.076498] env[68217]: DEBUG oslo_vmware.api [None req-5e7dd77b-d076-4199-a467-62621893498f tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1088.076498] env[68217]: value = "task-2961949" [ 1088.076498] env[68217]: _type = "Task" [ 1088.076498] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.089025] env[68217]: DEBUG oslo_vmware.api [None req-5e7dd77b-d076-4199-a467-62621893498f tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961949, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.160630] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961948, 'name': ReconfigVM_Task, 'duration_secs': 0.282572} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.160908] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Reconfigured VM instance instance-0000006d to attach disk [datastore1] 8f0d5766-005e-459d-b9f4-e46b435e43b8/8f0d5766-005e-459d-b9f4-e46b435e43b8.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1088.161550] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-37be07bb-e879-4146-aac8-958dc1369130 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.167707] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1088.167707] env[68217]: value = "task-2961950" [ 1088.167707] env[68217]: _type = "Task" [ 1088.167707] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.176978] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961950, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.339367] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68217) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1088.339568] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.399s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.339843] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.660s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1088.340048] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.361336] env[68217]: INFO nova.scheduler.client.report [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleted allocations for instance e3dfe047-7cdc-4a1d-8af3-6437b5555ac4 [ 1088.382103] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.382436] env[68217]: DEBUG nova.compute.manager [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Instance network_info: |[{"id": "189406da-f39b-4370-b43d-945cbb45afb2", "address": "fa:16:3e:fa:ff:e7", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap189406da-f3", "ovs_interfaceid": "189406da-f39b-4370-b43d-945cbb45afb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1088.382747] env[68217]: DEBUG oslo_concurrency.lockutils [req-6be408e5-4dec-41b1-9035-2314047a8425 req-ac5f4c62-e756-4237-8fa6-6a36c1c68d8a service nova] Acquired lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.382925] env[68217]: DEBUG nova.network.neutron [req-6be408e5-4dec-41b1-9035-2314047a8425 req-ac5f4c62-e756-4237-8fa6-6a36c1c68d8a service nova] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Refreshing network info cache for port 189406da-f39b-4370-b43d-945cbb45afb2 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1088.384083] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:fa:ff:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '189406da-f39b-4370-b43d-945cbb45afb2', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1088.391868] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1088.392819] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1088.393063] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-72ff81f2-4337-4d84-a225-bc4dfc573e3d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.412981] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1088.412981] env[68217]: value = "task-2961951" [ 1088.412981] env[68217]: _type = "Task" [ 1088.412981] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.421069] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961951, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.436857] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524abb00-59b1-37dc-582b-2f39466579ac, 'name': SearchDatastore_Task, 'duration_secs': 0.014439} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.437192] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1088.437424] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1088.437649] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.437792] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.437963] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1088.438255] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4fb2544a-ddd8-4940-bee5-50629a3c4379 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.447336] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1088.447617] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1088.448565] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-549869e1-bd85-49bd-a663-4ef8103bab59 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.452852] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1088.452852] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52357297-2828-f622-1491-5be5df0713e6" [ 1088.452852] env[68217]: _type = "Task" [ 1088.452852] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.461154] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52357297-2828-f622-1491-5be5df0713e6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.544965] env[68217]: DEBUG oslo_vmware.api [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961947, 'name': PowerOnVM_Task, 'duration_secs': 0.72561} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.545259] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1088.545474] env[68217]: INFO nova.compute.manager [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Took 9.72 seconds to spawn the instance on the hypervisor. [ 1088.545651] env[68217]: DEBUG nova.compute.manager [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1088.546692] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-023f567b-0994-4824-871c-807e1a3e7313 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.585836] env[68217]: DEBUG oslo_vmware.api [None req-5e7dd77b-d076-4199-a467-62621893498f tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961949, 'name': ReconfigVM_Task, 'duration_secs': 0.324211} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.586166] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e7dd77b-d076-4199-a467-62621893498f tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Reconfigured VM instance instance-00000057 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1088.590879] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca93191d-3847-4556-8a93-6d63a1dd2a99 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.607245] env[68217]: DEBUG oslo_vmware.api [None req-5e7dd77b-d076-4199-a467-62621893498f tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1088.607245] env[68217]: value = "task-2961952" [ 1088.607245] env[68217]: _type = "Task" [ 1088.607245] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.616223] env[68217]: DEBUG oslo_vmware.api [None req-5e7dd77b-d076-4199-a467-62621893498f tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961952, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.678881] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961950, 'name': Rename_Task, 'duration_secs': 0.138544} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.679190] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1088.679443] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0d4e7dd1-0ab6-4e12-82ef-ddafe8fae5ae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.685692] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1088.685692] env[68217]: value = "task-2961953" [ 1088.685692] env[68217]: _type = "Task" [ 1088.685692] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.694270] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961953, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.870099] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e845bc55-d54b-4249-a470-3a95e60fe315 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "e3dfe047-7cdc-4a1d-8af3-6437b5555ac4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.221s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1088.924142] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961951, 'name': CreateVM_Task, 'duration_secs': 0.419666} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.927849] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1088.928421] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1088.928692] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1088.929165] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1088.929783] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-beae55bf-157a-4a36-a062-eb3ea14ca420 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.934531] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1088.934531] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d82bf0-16d0-e69a-0217-68999fffcb6d" [ 1088.934531] env[68217]: _type = "Task" [ 1088.934531] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.944053] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d82bf0-16d0-e69a-0217-68999fffcb6d, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.963899] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52357297-2828-f622-1491-5be5df0713e6, 'name': SearchDatastore_Task, 'duration_secs': 0.010021} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1088.964740] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ce25212d-872f-4001-bcc3-68e1303dbda9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1088.969914] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1088.969914] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ab5335-ffdf-11aa-fb3b-9dfe1c1ce7f2" [ 1088.969914] env[68217]: _type = "Task" [ 1088.969914] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1088.977618] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ab5335-ffdf-11aa-fb3b-9dfe1c1ce7f2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.067569] env[68217]: INFO nova.compute.manager [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Took 14.65 seconds to build instance. [ 1089.123509] env[68217]: DEBUG oslo_vmware.api [None req-5e7dd77b-d076-4199-a467-62621893498f tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961952, 'name': ReconfigVM_Task, 'duration_secs': 0.151541} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.123853] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-5e7dd77b-d076-4199-a467-62621893498f tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594367', 'volume_id': '510a8087-136f-46d8-91cf-d7ca59a54445', 'name': 'volume-510a8087-136f-46d8-91cf-d7ca59a54445', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '776798bf-1ad4-4acb-ac58-cacc5493e1c7', 'attached_at': '', 'detached_at': '', 'volume_id': '510a8087-136f-46d8-91cf-d7ca59a54445', 'serial': '510a8087-136f-46d8-91cf-d7ca59a54445'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1089.177258] env[68217]: DEBUG nova.network.neutron [req-6be408e5-4dec-41b1-9035-2314047a8425 req-ac5f4c62-e756-4237-8fa6-6a36c1c68d8a service nova] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updated VIF entry in instance network info cache for port 189406da-f39b-4370-b43d-945cbb45afb2. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1089.177643] env[68217]: DEBUG nova.network.neutron [req-6be408e5-4dec-41b1-9035-2314047a8425 req-ac5f4c62-e756-4237-8fa6-6a36c1c68d8a service nova] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating instance_info_cache with network_info: [{"id": "189406da-f39b-4370-b43d-945cbb45afb2", "address": "fa:16:3e:fa:ff:e7", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap189406da-f3", "ovs_interfaceid": "189406da-f39b-4370-b43d-945cbb45afb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1089.200152] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961953, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.445234] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d82bf0-16d0-e69a-0217-68999fffcb6d, 'name': SearchDatastore_Task, 'duration_secs': 0.029889} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.445530] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.445752] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1089.445978] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.479675] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ab5335-ffdf-11aa-fb3b-9dfe1c1ce7f2, 'name': SearchDatastore_Task, 'duration_secs': 0.026677} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.479948] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.480304] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9/38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1089.480468] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.480654] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1089.481144] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-58345762-60a4-4c81-bd57-6e767e2ae8de {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.482764] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-02276aab-7a85-4de5-829e-f8abf2825316 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.489983] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1089.489983] env[68217]: value = "task-2961954" [ 1089.489983] env[68217]: _type = "Task" [ 1089.489983] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.493830] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1089.494029] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1089.495059] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6b0d1942-22e9-4284-ae5c-28b982119e6f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.499966] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961954, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.502973] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1089.502973] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5273e83c-10cd-0ad1-0558-bceaa17d00a4" [ 1089.502973] env[68217]: _type = "Task" [ 1089.502973] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1089.510039] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5273e83c-10cd-0ad1-0558-bceaa17d00a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.569779] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6e92e359-f961-4d28-8ec4-077054b2d4d5 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "89136574-575c-47da-928c-bd7a5dbb3a98" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.160s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.608056] env[68217]: DEBUG nova.compute.manager [req-bb7be565-148f-42a6-b8ef-1b1d83b2a52b req-1a2a70ca-b20e-4524-8a1f-acc166e16836 service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Received event network-changed-513d21ef-f0b3-47f7-96ae-f01c23ac3ef1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1089.608056] env[68217]: DEBUG nova.compute.manager [req-bb7be565-148f-42a6-b8ef-1b1d83b2a52b req-1a2a70ca-b20e-4524-8a1f-acc166e16836 service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Refreshing instance network info cache due to event network-changed-513d21ef-f0b3-47f7-96ae-f01c23ac3ef1. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1089.608056] env[68217]: DEBUG oslo_concurrency.lockutils [req-bb7be565-148f-42a6-b8ef-1b1d83b2a52b req-1a2a70ca-b20e-4524-8a1f-acc166e16836 service nova] Acquiring lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1089.608235] env[68217]: DEBUG oslo_concurrency.lockutils [req-bb7be565-148f-42a6-b8ef-1b1d83b2a52b req-1a2a70ca-b20e-4524-8a1f-acc166e16836 service nova] Acquired lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1089.608317] env[68217]: DEBUG nova.network.neutron [req-bb7be565-148f-42a6-b8ef-1b1d83b2a52b req-1a2a70ca-b20e-4524-8a1f-acc166e16836 service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Refreshing network info cache for port 513d21ef-f0b3-47f7-96ae-f01c23ac3ef1 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1089.667827] env[68217]: DEBUG nova.objects.instance [None req-5e7dd77b-d076-4199-a467-62621893498f tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lazy-loading 'flavor' on Instance uuid 776798bf-1ad4-4acb-ac58-cacc5493e1c7 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1089.680470] env[68217]: DEBUG oslo_concurrency.lockutils [req-6be408e5-4dec-41b1-9035-2314047a8425 req-ac5f4c62-e756-4237-8fa6-6a36c1c68d8a service nova] Releasing lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1089.700022] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961953, 'name': PowerOnVM_Task, 'duration_secs': 0.610161} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1089.701185] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1089.701185] env[68217]: INFO nova.compute.manager [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Took 8.23 seconds to spawn the instance on the hypervisor. 
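The CopyVirtualDisk_Task records above and below (task-2961954 and, later, task-2961957) show nova's vmwareapi driver issuing a datastore-to-datastore disk copy through oslo_vmware and then blocking in wait_for_task while _poll_task reports progress. A minimal sketch of that pattern, assuming an already established oslo_vmware VMwareAPISession (`session`, created during driver startup as captured earlier in this log) and purely hypothetical datacenter reference and vmdk paths, is:

    # Illustrative sketch only; not part of the captured log output.
    from oslo_vmware import exceptions as vexc

    def copy_cached_image(session, dc_ref, source_vmdk, dest_vmdk):
        """Issue CopyVirtualDisk_Task and block until it finishes,
        mirroring the wait_for_task/_poll_task records in this log."""
        disk_mgr = session.vim.service_content.virtualDiskManager
        task = session.invoke_api(session.vim, 'CopyVirtualDisk_Task',
                                  disk_mgr,
                                  sourceName=source_vmdk,
                                  sourceDatacenter=dc_ref,
                                  destName=dest_vmdk,
                                  destDatacenter=dc_ref)
        try:
            # wait_for_task polls the task object until it reaches the
            # 'success' or 'error' state; each poll is what produces the
            # "progress is N%" lines seen in this log.
            return session.wait_for_task(task)
        except vexc.VimException:
            # A failed copy surfaces here; the caller decides whether to
            # retry or fail the instance build.
            raise

The "Task: {'id': task-2961954, 'name': CopyVirtualDisk_Task} progress is N%" and "completed successfully" lines in the surrounding records correspond to successive iterations of that polling loop.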
[ 1089.701185] env[68217]: DEBUG nova.compute.manager [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1089.702173] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687362fa-3504-421b-b403-4d6f46bc62ae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.903993] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "3af571ce-c400-45a1-97ad-4fbd53395129" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.903993] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "3af571ce-c400-45a1-97ad-4fbd53395129" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.903993] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "3af571ce-c400-45a1-97ad-4fbd53395129-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.903993] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "3af571ce-c400-45a1-97ad-4fbd53395129-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1089.903993] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "3af571ce-c400-45a1-97ad-4fbd53395129-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1089.904851] env[68217]: INFO nova.compute.manager [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Terminating instance [ 1089.949910] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "6b4dff91-254e-43cc-85cf-7de6214dcafd" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1089.950185] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "6b4dff91-254e-43cc-85cf-7de6214dcafd" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.002605] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961954, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.014317] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5273e83c-10cd-0ad1-0558-bceaa17d00a4, 'name': SearchDatastore_Task, 'duration_secs': 0.011158} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.015347] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f566a61e-14a7-4337-80ef-7b57cbae76b9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.024018] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1090.024018] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fb9cce-56b2-e08e-4ac4-b6ca774ca92f" [ 1090.024018] env[68217]: _type = "Task" [ 1090.024018] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.035085] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fb9cce-56b2-e08e-4ac4-b6ca774ca92f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.218844] env[68217]: INFO nova.compute.manager [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Took 13.16 seconds to build instance. [ 1090.328744] env[68217]: DEBUG nova.network.neutron [req-bb7be565-148f-42a6-b8ef-1b1d83b2a52b req-1a2a70ca-b20e-4524-8a1f-acc166e16836 service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Updated VIF entry in instance network info cache for port 513d21ef-f0b3-47f7-96ae-f01c23ac3ef1. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1090.329138] env[68217]: DEBUG nova.network.neutron [req-bb7be565-148f-42a6-b8ef-1b1d83b2a52b req-1a2a70ca-b20e-4524-8a1f-acc166e16836 service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Updating instance_info_cache with network_info: [{"id": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "address": "fa:16:3e:41:6d:56", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap513d21ef-f0", "ovs_interfaceid": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1090.410729] env[68217]: DEBUG nova.compute.manager [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1090.411061] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1090.412295] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d537eed-8c8a-4a07-9c76-a6305e5837cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.421988] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1090.422344] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-075b1982-bf5f-4331-bd2f-cd3d9752b562 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.429553] env[68217]: DEBUG oslo_vmware.api [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1090.429553] env[68217]: value = "task-2961955" [ 1090.429553] env[68217]: _type = "Task" [ 1090.429553] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.439984] env[68217]: DEBUG oslo_vmware.api [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961955, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.453419] env[68217]: DEBUG nova.compute.utils [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1090.493890] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "6789dd7d-d042-4c29-a963-2b4b982d5b43" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1090.494184] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "6789dd7d-d042-4c29-a963-2b4b982d5b43" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1090.505204] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961954, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.80353} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.505960] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9/38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1090.506248] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1090.506596] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-edc69fc6-7657-4766-9fd7-23cf8af8d943 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.513597] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1090.513597] env[68217]: value = "task-2961956" [ 1090.513597] env[68217]: _type = "Task" [ 1090.513597] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.523564] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961956, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.534092] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fb9cce-56b2-e08e-4ac4-b6ca774ca92f, 'name': SearchDatastore_Task, 'duration_secs': 0.050543} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.534422] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.534725] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 400058d8-f9ca-41b9-a671-b04b0511d074/400058d8-f9ca-41b9-a671-b04b0511d074.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1090.535073] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-442c06f9-db9f-43f2-9309-2732ffd8f991 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.541477] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1090.541477] env[68217]: value = "task-2961957" [ 1090.541477] env[68217]: _type = "Task" [ 1090.541477] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.549743] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961957, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.674756] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5e7dd77b-d076-4199-a467-62621893498f tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.236s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.721129] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "8f0d5766-005e-459d-b9f4-e46b435e43b8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.668s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.831609] env[68217]: DEBUG oslo_concurrency.lockutils [req-bb7be565-148f-42a6-b8ef-1b1d83b2a52b req-1a2a70ca-b20e-4524-8a1f-acc166e16836 service nova] Releasing lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1090.942449] env[68217]: DEBUG oslo_vmware.api [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961955, 'name': PowerOffVM_Task, 'duration_secs': 0.308262} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.942830] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1090.943123] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1090.943413] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b1a5fb34-64c7-4297-b544-547310f98c99 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.956521] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "6b4dff91-254e-43cc-85cf-7de6214dcafd" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1090.999594] env[68217]: DEBUG nova.compute.manager [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1091.012236] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1091.012541] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1091.012917] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleting the datastore file [datastore1] 3af571ce-c400-45a1-97ad-4fbd53395129 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1091.013141] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8d54df5f-8fff-416b-9d97-e2916df020a4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.027443] env[68217]: DEBUG oslo_vmware.api [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for the task: (returnval){ [ 1091.027443] env[68217]: value = "task-2961959" [ 1091.027443] env[68217]: _type = "Task" [ 1091.027443] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.030480] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961956, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.081281} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.034010] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1091.034850] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e115a13-f5c7-4cb1-babf-4619ea25a89b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.053547] env[68217]: DEBUG oslo_vmware.api [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961959, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.063093] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Reconfiguring VM instance instance-0000006e to attach disk [datastore1] 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9/38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1091.067526] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da9bb28c-c83c-4d7b-b59e-9a1b73386de1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.089051] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1091.089051] env[68217]: value = "task-2961960" [ 1091.089051] env[68217]: _type = "Task" [ 1091.089051] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.092765] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961957, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.102626] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961960, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.522530] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.522814] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.524663] env[68217]: INFO nova.compute.claims [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1091.542568] env[68217]: DEBUG oslo_vmware.api [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961959, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.561828] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961957, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.743888} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.563028] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 400058d8-f9ca-41b9-a671-b04b0511d074/400058d8-f9ca-41b9-a671-b04b0511d074.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1091.563192] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1091.563452] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-575ec4ae-63cb-4f6e-a373-1f5acb20c069 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.570026] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1091.570026] env[68217]: value = "task-2961961" [ 1091.570026] env[68217]: _type = "Task" [ 1091.570026] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.578300] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961961, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.601159] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961960, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.767766] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.768232] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.768523] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1091.768728] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1091.768904] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1091.771090] env[68217]: INFO nova.compute.manager [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Terminating instance [ 1092.015361] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "6b4dff91-254e-43cc-85cf-7de6214dcafd" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1092.015717] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "6b4dff91-254e-43cc-85cf-7de6214dcafd" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1092.015873] env[68217]: INFO nova.compute.manager [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Attaching volume a6379fd6-1468-4eff-945c-6ffe74897b05 to /dev/sdb [ 1092.044092] env[68217]: DEBUG oslo_vmware.api [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Task: {'id': task-2961959, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.735501} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.044092] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1092.044228] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1092.044414] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1092.044624] env[68217]: INFO nova.compute.manager [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1092.046206] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1092.046206] env[68217]: DEBUG nova.compute.manager [-] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1092.046206] env[68217]: DEBUG nova.network.neutron [-] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1092.049500] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d396748f-f2a5-4c27-8676-f82f68123fde {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.057079] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e159c4cd-78b7-4717-9a33-873a37ccc66a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.071198] env[68217]: DEBUG nova.virt.block_device [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Updating existing volume attachment record: 1caed0d7-c621-42bc-bc2f-b1e680071cef {{(pid=68217) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1092.081721] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961961, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.189131} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.084072] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1092.085142] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7e7637-28ce-461c-9317-0f8ca93de1aa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.111039] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 400058d8-f9ca-41b9-a671-b04b0511d074/400058d8-f9ca-41b9-a671-b04b0511d074.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1092.115222] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-58dc063f-9206-427e-9e9a-9b73bd98658a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.138921] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961960, 'name': ReconfigVM_Task, 'duration_secs': 0.87357} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.140477] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Reconfigured VM instance instance-0000006e to attach disk [datastore1] 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9/38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1092.141400] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1092.141400] env[68217]: value = "task-2961962" [ 1092.141400] env[68217]: _type = "Task" [ 1092.141400] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.141651] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-76bfd9b5-95c4-4c6d-b0a2-1c602e7becea {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.154572] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961962, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.156164] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1092.156164] env[68217]: value = "task-2961963" [ 1092.156164] env[68217]: _type = "Task" [ 1092.156164] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.164930] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961963, 'name': Rename_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.274626] env[68217]: DEBUG nova.compute.manager [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1092.274940] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1092.275870] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1146deb-344d-4149-81be-b115fa4664cc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.283095] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1092.283352] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-da809e62-f054-4646-8de6-a7e2fbf9f3e0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.290181] env[68217]: DEBUG oslo_vmware.api [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1092.290181] env[68217]: value = "task-2961965" [ 1092.290181] env[68217]: _type = "Task" [ 1092.290181] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.299283] env[68217]: DEBUG oslo_vmware.api [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961965, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.361545] env[68217]: DEBUG nova.compute.manager [req-dbe93957-a7f8-4afa-b186-8ccd39cd4c2e req-0f1e712a-f0cc-4da7-809a-95b3c8a0b00d service nova] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Received event network-vif-deleted-56c6f7cc-e41b-4494-84b7-8925f5d7e59d {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1092.361694] env[68217]: INFO nova.compute.manager [req-dbe93957-a7f8-4afa-b186-8ccd39cd4c2e req-0f1e712a-f0cc-4da7-809a-95b3c8a0b00d service nova] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Neutron deleted interface 56c6f7cc-e41b-4494-84b7-8925f5d7e59d; detaching it from the instance and deleting it from the info cache [ 1092.361856] env[68217]: DEBUG nova.network.neutron [req-dbe93957-a7f8-4afa-b186-8ccd39cd4c2e req-0f1e712a-f0cc-4da7-809a-95b3c8a0b00d service nova] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.659401] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961962, 'name': ReconfigVM_Task, 'duration_secs': 0.496615} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.662167] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 400058d8-f9ca-41b9-a671-b04b0511d074/400058d8-f9ca-41b9-a671-b04b0511d074.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1092.663028] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-73a528ab-ac89-4485-b6db-b445aa6ef640 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.674992] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961963, 'name': Rename_Task, 'duration_secs': 0.177549} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.676284] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1092.676624] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1092.676624] env[68217]: value = "task-2961968" [ 1092.676624] env[68217]: _type = "Task" [ 1092.676624] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.676882] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a5ec07b6-2e6c-461e-8adc-6b023ad43c33 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.686356] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1092.686356] env[68217]: value = "task-2961969" [ 1092.686356] env[68217]: _type = "Task" [ 1092.686356] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.692751] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961968, 'name': Rename_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.697345] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961969, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.778848] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d7e351-f553-44d8-9bec-731d6927ee45 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.785783] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0f4d807-4920-43eb-958b-9978e9bbcd69 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.820851] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1298d15a-c4b7-43b0-b853-ca17b26cce09 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.826979] env[68217]: DEBUG oslo_vmware.api [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961965, 'name': PowerOffVM_Task, 'duration_secs': 0.467393} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.827780] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1092.827993] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1092.828268] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fec29226-ccb6-45d4-8ae5-f4e34cb23e2f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.833961] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9267dc99-ba9f-427d-bf5d-0ccee89e5277 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.847765] env[68217]: DEBUG nova.network.neutron [-] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1092.849235] env[68217]: DEBUG nova.compute.provider_tree [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1092.864937] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5d1d0477-557b-46c5-85e5-0f86f3cdb2ad {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.874414] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a399352-1560-4cfb-ad38-4fe0d2b2bf24 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.894950] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1092.895220] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1092.895421] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Deleting the datastore file [datastore2] 
776798bf-1ad4-4acb-ac58-cacc5493e1c7 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1092.895669] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cc352ca7-69db-4ede-aaa7-084bf4f92a4e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.912363] env[68217]: DEBUG nova.compute.manager [req-dbe93957-a7f8-4afa-b186-8ccd39cd4c2e req-0f1e712a-f0cc-4da7-809a-95b3c8a0b00d service nova] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Detach interface failed, port_id=56c6f7cc-e41b-4494-84b7-8925f5d7e59d, reason: Instance 3af571ce-c400-45a1-97ad-4fbd53395129 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1092.912883] env[68217]: DEBUG oslo_vmware.api [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1092.912883] env[68217]: value = "task-2961971" [ 1092.912883] env[68217]: _type = "Task" [ 1092.912883] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.920580] env[68217]: DEBUG oslo_vmware.api [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961971, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.188555] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961968, 'name': Rename_Task, 'duration_secs': 0.161725} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.191493] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1093.191739] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cdcaae6d-741c-4c5b-8196-2f5b17b25f8f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.198270] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961969, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.199525] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1093.199525] env[68217]: value = "task-2961972" [ 1093.199525] env[68217]: _type = "Task" [ 1093.199525] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.207035] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961972, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.352040] env[68217]: INFO nova.compute.manager [-] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Took 1.31 seconds to deallocate network for instance. [ 1093.354647] env[68217]: DEBUG nova.scheduler.client.report [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1093.422676] env[68217]: DEBUG oslo_vmware.api [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2961971, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193798} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.422937] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1093.423133] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1093.423310] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1093.423478] env[68217]: INFO nova.compute.manager [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1093.423717] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1093.423906] env[68217]: DEBUG nova.compute.manager [-] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1093.424009] env[68217]: DEBUG nova.network.neutron [-] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1093.701147] env[68217]: DEBUG oslo_vmware.api [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961969, 'name': PowerOnVM_Task, 'duration_secs': 0.580814} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.705262] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1093.705629] env[68217]: INFO nova.compute.manager [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Took 9.88 seconds to spawn the instance on the hypervisor. [ 1093.709015] env[68217]: DEBUG nova.compute.manager [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1093.709015] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a5cd197-4408-40d3-8538-32796c4a611f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.715727] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961972, 'name': PowerOnVM_Task} progress is 78%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.861548] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.339s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1093.865081] env[68217]: DEBUG nova.compute.manager [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Start building networks asynchronously for instance. 
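The entries for instance 776798bf-1ad4-4acb-ac58-cacc5493e1c7 above trace the teardown order the VMware driver follows: power the VM off, unregister it from vCenter, delete its directory from the datastore, and only then deallocate its Neutron ports. Below is a schematic sketch of that ordering with stand-in objects; the session and network API methods are placeholders, not the real vmops/VMwareAPISession interfaces.

class FakeVMwareSession:
    """Stand-in session wrapper; each method just records the call."""

    def __init__(self):
        self.calls = []

    def power_off_vm(self, uuid):            # PowerOffVM_Task
        self.calls.append(("power_off", uuid))

    def unregister_vm(self, uuid):           # UnregisterVM
        self.calls.append(("unregister", uuid))

    def delete_datastore_file(self, path):   # DeleteDatastoreFile_Task
        self.calls.append(("delete_file", path))


class FakeNetworkAPI:
    def __init__(self):
        self.calls = []

    def deallocate_for_instance(self, uuid):
        self.calls.append(("deallocate", uuid))


def destroy_instance(session, network_api, instance_uuid, datastore_path):
    """Tear an instance down in the order the log entries above show."""
    session.power_off_vm(instance_uuid)                  # "Powered off the VM"
    session.unregister_vm(instance_uuid)                 # "Unregistered the VM"
    session.delete_datastore_file(datastore_path)        # "Deleted the datastore file"
    network_api.deallocate_for_instance(instance_uuid)   # "Deallocating network for instance"


if __name__ == "__main__":
    s, n = FakeVMwareSession(), FakeNetworkAPI()
    destroy_instance(s, n, "776798bf-1ad4-4acb-ac58-cacc5493e1c7",
                     "[datastore2] 776798bf-1ad4-4acb-ac58-cacc5493e1c7")
    print(s.calls + n.calls)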
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1093.867533] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1093.867935] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1093.868290] env[68217]: DEBUG nova.objects.instance [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lazy-loading 'resources' on Instance uuid 3af571ce-c400-45a1-97ad-4fbd53395129 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1094.212409] env[68217]: DEBUG oslo_vmware.api [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2961972, 'name': PowerOnVM_Task, 'duration_secs': 0.816277} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.215083] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1094.215083] env[68217]: INFO nova.compute.manager [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Took 8.08 seconds to spawn the instance on the hypervisor. [ 1094.215083] env[68217]: DEBUG nova.compute.manager [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1094.215083] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6ea993e-983b-4854-b2f1-ab3b5d272615 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.229772] env[68217]: INFO nova.compute.manager [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Took 17.13 seconds to build instance. 
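The Acquiring/acquired/released lock lines come from oslo.concurrency's lockutils, which Nova uses to serialize operations on a single instance: the terminate request for 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9 had to wait 0.061s until the build released the per-instance lock it had held for 18.642s. A minimal example of that pattern with the real oslo_concurrency.lockutils context manager is below (requires oslo.concurrency to be installed); the two worker functions are illustrative stand-ins for the build and terminate paths, not Nova's code.

import threading
import time

from oslo_concurrency import lockutils

INSTANCE_UUID = "38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9"  # taken from the log above


def build_instance():
    # Holds the per-instance lock while "building", like
    # _locked_do_build_and_run_instance in the log.
    with lockutils.lock(INSTANCE_UUID):
        print("build: lock acquired")
        time.sleep(0.5)                  # pretend to spawn the VM
        print("build: releasing lock")


def terminate_instance():
    # Must wait for the build to finish, mirroring the
    # "waited 0.061s" entry for do_terminate_instance.
    with lockutils.lock(INSTANCE_UUID):
        print("terminate: lock acquired")


if __name__ == "__main__":
    b = threading.Thread(target=build_instance)
    t = threading.Thread(target=terminate_instance)
    b.start()
    time.sleep(0.1)    # let the build grab the lock first
    t.start()
    b.join()
    t.join()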
[ 1094.372443] env[68217]: DEBUG nova.compute.utils [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1094.377128] env[68217]: DEBUG nova.compute.manager [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1094.377460] env[68217]: DEBUG nova.network.neutron [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1094.388732] env[68217]: DEBUG nova.compute.manager [req-c149d467-973b-4906-8666-8af3e0c62933 req-b95cfdfb-999c-4f11-88fb-8ac37ba634bb service nova] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Received event network-vif-deleted-c9ba3459-1c52-46fa-b8b7-7f41a840a334 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1094.389427] env[68217]: INFO nova.compute.manager [req-c149d467-973b-4906-8666-8af3e0c62933 req-b95cfdfb-999c-4f11-88fb-8ac37ba634bb service nova] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Neutron deleted interface c9ba3459-1c52-46fa-b8b7-7f41a840a334; detaching it from the instance and deleting it from the info cache [ 1094.389427] env[68217]: DEBUG nova.network.neutron [req-c149d467-973b-4906-8666-8af3e0c62933 req-b95cfdfb-999c-4f11-88fb-8ac37ba634bb service nova] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.390663] env[68217]: DEBUG nova.network.neutron [-] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1094.443231] env[68217]: DEBUG nova.policy [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9172578aec2742bb9aafc58752b926c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef7e30ed571740f3b3ea6b24fc9c6e20', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1094.579620] env[68217]: DEBUG oslo_concurrency.lockutils [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "8f0d5766-005e-459d-b9f4-e46b435e43b8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 
1094.579783] env[68217]: DEBUG oslo_concurrency.lockutils [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "8f0d5766-005e-459d-b9f4-e46b435e43b8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.579899] env[68217]: DEBUG oslo_concurrency.lockutils [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "8f0d5766-005e-459d-b9f4-e46b435e43b8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.580101] env[68217]: DEBUG oslo_concurrency.lockutils [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "8f0d5766-005e-459d-b9f4-e46b435e43b8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.580274] env[68217]: DEBUG oslo_concurrency.lockutils [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "8f0d5766-005e-459d-b9f4-e46b435e43b8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.582703] env[68217]: INFO nova.compute.manager [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Terminating instance [ 1094.626523] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82653ce5-090f-43a2-b330-99e7868da681 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.642476] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70e3654e-712c-4830-a92f-0ff998e69c4d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.673812] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.674821] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0c5fb48-53b4-4eaa-bee8-a0fd4122f58b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.682540] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ac627ffe-f020-485a-bec2-6357bd8b76c9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.696617] env[68217]: DEBUG nova.compute.provider_tree [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1094.734490] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1c044067-42b5-46d0-bde9-be456db26b46 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.642s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.734915] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.061s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.735140] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.735299] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.735486] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1094.740096] env[68217]: INFO nova.compute.manager [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Took 14.77 seconds to build instance. 
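The repeated "Inventory has not changed for provider ... based on inventory data" lines show the resource tracker comparing the compute node's current VCPU / MEMORY_MB / DISK_GB inventory against what it already reported for the provider and skipping the placement update when nothing differs. A rough sketch of that comparison is below, assuming inventory is held as plain nested dicts; the real report client keeps this state in a ProviderTree object and talks to the placement API.

CURRENT = {
    "VCPU": {"total": 48, "reserved": 0, "min_unit": 1, "max_unit": 16,
             "step_size": 1, "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "min_unit": 1,
                  "max_unit": 65530, "step_size": 1, "allocation_ratio": 1.0},
    "DISK_GB": {"total": 400, "reserved": 0, "min_unit": 1, "max_unit": 153,
                "step_size": 1, "allocation_ratio": 1.0},
}


def inventory_changed(cached, new):
    """Return True if the provider inventory needs to be pushed to placement."""
    return cached != new   # dict equality covers every resource class and field


def maybe_update_provider(provider_uuid, cached, new):
    if not inventory_changed(cached, new):
        # Matches the "Inventory has not changed for provider ..." log lines.
        print(f"Inventory has not changed for provider {provider_uuid}")
        return cached
    print(f"Updating inventory for provider {provider_uuid}")
    # A real implementation would PUT the new inventories to placement here.
    return new


if __name__ == "__main__":
    maybe_update_provider("42aedcce-ee61-45e1-bf10-c06056d1f548",
                          CURRENT, dict(CURRENT))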
[ 1094.740096] env[68217]: INFO nova.compute.manager [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Terminating instance [ 1094.830309] env[68217]: DEBUG nova.network.neutron [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Successfully created port: 3ee5d607-0533-4e05-9447-4840b4e48cdd {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1094.878401] env[68217]: DEBUG nova.compute.manager [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1094.893308] env[68217]: INFO nova.compute.manager [-] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Took 1.47 seconds to deallocate network for instance. [ 1094.893578] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-40a580ec-f44f-436e-ac6b-87a83ba084b6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.912976] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd28378a-efd8-47fb-aef7-413806b9cc26 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.926709] env[68217]: DEBUG oslo_concurrency.lockutils [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1094.926932] env[68217]: DEBUG oslo_concurrency.lockutils [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1094.952356] env[68217]: DEBUG nova.compute.manager [req-c149d467-973b-4906-8666-8af3e0c62933 req-b95cfdfb-999c-4f11-88fb-8ac37ba634bb service nova] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Detach interface failed, port_id=c9ba3459-1c52-46fa-b8b7-7f41a840a334, reason: Instance 776798bf-1ad4-4acb-ac58-cacc5493e1c7 could not be found. 
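"Using /dev/sd instead of None" (get_next_device_name) together with the reserve_block_device_name lock above are the first steps of a volume attach: when no device name is supplied, Nova falls back to the /dev/sd prefix and picks the next free letter, which is why the volume later in this log lands on /dev/sdb. A simplified sketch of that device-name selection follows; it only handles single-letter suffixes and is not the real compute.utils helper.

import re
import string


def get_next_device_name(existing, prefix="/dev/sd"):
    """Pick the next free disk device name, e.g. /dev/sdb after /dev/sda."""
    used = set()
    for name in existing:
        m = re.match(r".*/(?:sd|vd|xvd)([a-z])$", name)
        if m:
            used.add(m.group(1))
    for letter in string.ascii_lowercase:
        if letter not in used:
            return prefix + letter
    raise ValueError("no free device names left")


if __name__ == "__main__":
    # The boot disk already holds /dev/sda, so the new volume gets /dev/sdb.
    print(get_next_device_name(["/dev/sda"]))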
{{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1095.083353] env[68217]: DEBUG nova.compute.manager [req-80207492-e135-47f0-9589-fc1c411c4677 req-df0cd440-25ee-485c-9814-0b7a68c41a70 service nova] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Received event network-changed-189406da-f39b-4370-b43d-945cbb45afb2 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1095.084146] env[68217]: DEBUG nova.compute.manager [req-80207492-e135-47f0-9589-fc1c411c4677 req-df0cd440-25ee-485c-9814-0b7a68c41a70 service nova] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Refreshing instance network info cache due to event network-changed-189406da-f39b-4370-b43d-945cbb45afb2. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1095.084623] env[68217]: DEBUG oslo_concurrency.lockutils [req-80207492-e135-47f0-9589-fc1c411c4677 req-df0cd440-25ee-485c-9814-0b7a68c41a70 service nova] Acquiring lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1095.084779] env[68217]: DEBUG oslo_concurrency.lockutils [req-80207492-e135-47f0-9589-fc1c411c4677 req-df0cd440-25ee-485c-9814-0b7a68c41a70 service nova] Acquired lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1095.084969] env[68217]: DEBUG nova.network.neutron [req-80207492-e135-47f0-9589-fc1c411c4677 req-df0cd440-25ee-485c-9814-0b7a68c41a70 service nova] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Refreshing network info cache for port 189406da-f39b-4370-b43d-945cbb45afb2 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1095.090021] env[68217]: DEBUG nova.compute.manager [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1095.090021] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1095.090296] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6041a1d-ce17-4c83-bf5b-5abdcb5f9a5e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.100216] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1095.100286] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7dc83869-3dcc-4116-bca7-684bf1e45c1c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.106481] env[68217]: DEBUG oslo_vmware.api [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1095.106481] env[68217]: value = "task-2961974" [ 1095.106481] env[68217]: _type = "Task" [ 1095.106481] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.117907] env[68217]: DEBUG oslo_vmware.api [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961974, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.199937] env[68217]: DEBUG nova.scheduler.client.report [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1095.241441] env[68217]: DEBUG oslo_concurrency.lockutils [None req-74ee7810-3c68-460c-98c1-1c717649ecd8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "400058d8-f9ca-41b9-a671-b04b0511d074" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.282s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.245780] env[68217]: DEBUG nova.compute.manager [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1095.245780] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1095.246511] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6981c491-462c-4259-83ce-4f45b5bcffaf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.255483] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1095.255742] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3a277d8c-2556-4a85-81f6-9d5cfc23cf70 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.261197] env[68217]: DEBUG oslo_vmware.api [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1095.261197] env[68217]: value = "task-2961975" [ 1095.261197] env[68217]: _type = "Task" [ 1095.261197] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.269585] env[68217]: DEBUG oslo_vmware.api [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961975, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.411575] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1095.432655] env[68217]: DEBUG nova.compute.utils [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1095.617259] env[68217]: DEBUG oslo_vmware.api [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961974, 'name': PowerOffVM_Task, 'duration_secs': 0.383745} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.617259] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1095.617259] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1095.619464] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8ba9f965-7963-4861-a04a-484b8825f727 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.679923] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1095.680167] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1095.680353] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Deleting the 
datastore file [datastore1] 8f0d5766-005e-459d-b9f4-e46b435e43b8 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1095.680620] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e20eae04-01b7-416a-8637-ba7a0dba76d8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.689674] env[68217]: DEBUG oslo_vmware.api [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1095.689674] env[68217]: value = "task-2961977" [ 1095.689674] env[68217]: _type = "Task" [ 1095.689674] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.697202] env[68217]: DEBUG oslo_vmware.api [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961977, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.708410] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.840s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1095.712446] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.301s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1095.712672] env[68217]: DEBUG nova.objects.instance [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lazy-loading 'resources' on Instance uuid 776798bf-1ad4-4acb-ac58-cacc5493e1c7 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1095.761784] env[68217]: INFO nova.scheduler.client.report [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Deleted allocations for instance 3af571ce-c400-45a1-97ad-4fbd53395129 [ 1095.773883] env[68217]: DEBUG oslo_vmware.api [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961975, 'name': PowerOffVM_Task, 'duration_secs': 0.233332} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.774184] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1095.774398] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1095.774633] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-485352d5-93f9-4051-b004-65b4694e8458 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.814981] env[68217]: DEBUG nova.network.neutron [req-80207492-e135-47f0-9589-fc1c411c4677 req-df0cd440-25ee-485c-9814-0b7a68c41a70 service nova] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updated VIF entry in instance network info cache for port 189406da-f39b-4370-b43d-945cbb45afb2. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1095.815431] env[68217]: DEBUG nova.network.neutron [req-80207492-e135-47f0-9589-fc1c411c4677 req-df0cd440-25ee-485c-9814-0b7a68c41a70 service nova] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating instance_info_cache with network_info: [{"id": "189406da-f39b-4370-b43d-945cbb45afb2", "address": "fa:16:3e:fa:ff:e7", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap189406da-f3", "ovs_interfaceid": "189406da-f39b-4370-b43d-945cbb45afb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1095.831464] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1095.831672] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 
tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1095.831847] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Deleting the datastore file [datastore1] 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1095.832120] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b3216831-0898-42b0-8c5f-0d973b2c7c20 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.839347] env[68217]: DEBUG oslo_vmware.api [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1095.839347] env[68217]: value = "task-2961979" [ 1095.839347] env[68217]: _type = "Task" [ 1095.839347] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.847137] env[68217]: DEBUG oslo_vmware.api [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961979, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.893278] env[68217]: DEBUG nova.compute.manager [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1095.920022] env[68217]: DEBUG nova.virt.hardware [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1095.920279] env[68217]: DEBUG nova.virt.hardware [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1095.920439] env[68217]: DEBUG nova.virt.hardware [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1095.920696] env[68217]: DEBUG nova.virt.hardware [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1095.920854] env[68217]: DEBUG nova.virt.hardware [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1095.921053] env[68217]: DEBUG nova.virt.hardware [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1095.921855] env[68217]: DEBUG nova.virt.hardware [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1095.921855] env[68217]: DEBUG nova.virt.hardware [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1095.921855] 
env[68217]: DEBUG nova.virt.hardware [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1095.921855] env[68217]: DEBUG nova.virt.hardware [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1095.922075] env[68217]: DEBUG nova.virt.hardware [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1095.922828] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a335f727-284e-473e-86b5-e7ffdb06c313 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.931607] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebab6553-568d-4e1f-ad90-47c6a9d1dacd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.936457] env[68217]: DEBUG oslo_concurrency.lockutils [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.009s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.201945] env[68217]: DEBUG oslo_vmware.api [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961977, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.16246} completed successfully. 
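The nova.virt.hardware entries above step through CPU topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits the maxima default to 65536 sockets, cores and threads, every socket x core x thread split whose product equals the vCPU count is enumerated, and the only valid topology here is 1:1:1. A simplified sketch of that enumeration follows; it ignores the preference-based sorting and NUMA handling the real code applies.

from typing import List, NamedTuple


class Topology(NamedTuple):
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus: int,
                        max_sockets: int = 65536,
                        max_cores: int = 65536,
                        max_threads: int = 65536) -> List[Topology]:
    """Enumerate socket/core/thread splits whose product equals vcpus."""
    found = []
    # Each factor can never exceed the vCPU count, so bound the loops there.
    for s in range(1, min(max_sockets, vcpus) + 1):
        for c in range(1, min(max_cores, vcpus) + 1):
            for t in range(1, min(max_threads, vcpus) + 1):
                if s * c * t == vcpus:
                    found.append(Topology(s, c, t))
    return found


if __name__ == "__main__":
    # m1.nano from the log: 1 vCPU, no limits -> the single topology 1:1:1.
    print(possible_topologies(1))
    # A 4-vCPU flavor would yield several valid splits.
    print(possible_topologies(4))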
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.202338] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1096.202521] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1096.202696] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1096.202872] env[68217]: INFO nova.compute.manager [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1096.203152] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1096.203461] env[68217]: DEBUG nova.compute.manager [-] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1096.203461] env[68217]: DEBUG nova.network.neutron [-] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1096.278116] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb9e1fc-4534-4d53-9962-7f5a343c93d4 tempest-ServersTestJSON-518151278 tempest-ServersTestJSON-518151278-project-member] Lock "3af571ce-c400-45a1-97ad-4fbd53395129" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.376s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.317858] env[68217]: DEBUG oslo_concurrency.lockutils [req-80207492-e135-47f0-9589-fc1c411c4677 req-df0cd440-25ee-485c-9814-0b7a68c41a70 service nova] Releasing lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1096.353992] env[68217]: DEBUG oslo_vmware.api [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2961979, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.171977} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1096.357853] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1096.357853] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1096.357853] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1096.357853] env[68217]: INFO nova.compute.manager [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1096.357853] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
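"Waiting for function ... _deallocate_network_with_retries to return" is logged while the compute manager blocks on a looping call that retries network deallocation until it succeeds. The sketch below reproduces that retry-until-done behaviour in plain Python rather than with oslo.service's looping-call classes; the flaky_deallocate function and retry budget are made up for illustration.

import time


def call_with_retries(func, max_retries=3, interval=0.05):
    """Keep calling func until it succeeds or the retry budget is exhausted.

    A plain-Python stand-in for the retrying deallocation the log line
    above refers to.
    """
    attempt = 0
    while True:
        try:
            return func()
        except Exception as exc:   # the real code narrows this to specific errors
            attempt += 1
            if attempt > max_retries:
                raise
            print(f"deallocate failed ({exc}); retry {attempt}/{max_retries}")
            time.sleep(interval)


if __name__ == "__main__":
    state = {"calls": 0}

    def flaky_deallocate():
        state["calls"] += 1
        if state["calls"] < 3:
            raise RuntimeError("neutron temporarily unavailable")
        return "deallocated"

    print(call_with_retries(flaky_deallocate))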
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1096.357853] env[68217]: DEBUG nova.compute.manager [-] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1096.357853] env[68217]: DEBUG nova.network.neutron [-] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1096.430869] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f33ae7c5-6e18-4e39-89bf-ab5c5e27af1c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.439366] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1d794cd-c5e5-4298-b96b-7a7d22a2b169 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.471514] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef8e2154-d69b-4ce0-a443-f1bf94c28dae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.479537] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd206bd4-7e4a-4f4d-837e-f8dd94144842 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.497281] env[68217]: DEBUG nova.compute.provider_tree [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1096.540573] env[68217]: DEBUG nova.network.neutron [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Successfully updated port: 3ee5d607-0533-4e05-9447-4840b4e48cdd {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1096.549295] env[68217]: DEBUG nova.compute.manager [req-72f03dba-a97c-4e49-b802-2a573c531bff req-51633d22-7ff8-4303-a91b-7f9c2ec711d9 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Received event network-vif-plugged-3ee5d607-0533-4e05-9447-4840b4e48cdd {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1096.549295] env[68217]: DEBUG oslo_concurrency.lockutils [req-72f03dba-a97c-4e49-b802-2a573c531bff req-51633d22-7ff8-4303-a91b-7f9c2ec711d9 service nova] Acquiring lock "6789dd7d-d042-4c29-a963-2b4b982d5b43-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1096.549295] env[68217]: DEBUG oslo_concurrency.lockutils [req-72f03dba-a97c-4e49-b802-2a573c531bff req-51633d22-7ff8-4303-a91b-7f9c2ec711d9 service nova] Lock "6789dd7d-d042-4c29-a963-2b4b982d5b43-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1096.549295] env[68217]: DEBUG oslo_concurrency.lockutils [req-72f03dba-a97c-4e49-b802-2a573c531bff req-51633d22-7ff8-4303-a91b-7f9c2ec711d9 service nova] Lock "6789dd7d-d042-4c29-a963-2b4b982d5b43-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1096.549295] env[68217]: DEBUG nova.compute.manager [req-72f03dba-a97c-4e49-b802-2a573c531bff req-51633d22-7ff8-4303-a91b-7f9c2ec711d9 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] No waiting events found dispatching network-vif-plugged-3ee5d607-0533-4e05-9447-4840b4e48cdd {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1096.549295] env[68217]: WARNING nova.compute.manager [req-72f03dba-a97c-4e49-b802-2a573c531bff req-51633d22-7ff8-4303-a91b-7f9c2ec711d9 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Received unexpected event network-vif-plugged-3ee5d607-0533-4e05-9447-4840b4e48cdd for instance with vm_state building and task_state spawning. [ 1097.000709] env[68217]: DEBUG nova.scheduler.client.report [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1097.008203] env[68217]: DEBUG oslo_concurrency.lockutils [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1097.008447] env[68217]: DEBUG oslo_concurrency.lockutils [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1097.008667] env[68217]: INFO nova.compute.manager [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Attaching volume 1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d to /dev/sdb [ 1097.047396] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1097.048197] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1097.048197] env[68217]: DEBUG nova.network.neutron [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1097.052193] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483e30c7-c5b7-401f-9c94-88900ed2b2c6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.060983] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b4d0d32-9621-4e1c-be1f-9a6d56711ed7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.074791] env[68217]: DEBUG nova.virt.block_device [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Updating existing volume attachment record: 67a56820-c8d6-4b2c-baff-773d08a0c1a3 {{(pid=68217) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1097.120664] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Volume attach. 
Driver type: vmdk {{(pid=68217) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1097.120892] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594389', 'volume_id': 'a6379fd6-1468-4eff-945c-6ffe74897b05', 'name': 'volume-a6379fd6-1468-4eff-945c-6ffe74897b05', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6b4dff91-254e-43cc-85cf-7de6214dcafd', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6379fd6-1468-4eff-945c-6ffe74897b05', 'serial': 'a6379fd6-1468-4eff-945c-6ffe74897b05'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1097.121770] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77726ece-31d0-4989-ada6-3ac313aaa3d5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.139814] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0de21db6-30dd-49c9-b4c6-b961f41cb846 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.157585] env[68217]: DEBUG nova.network.neutron [-] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.167617] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] volume-a6379fd6-1468-4eff-945c-6ffe74897b05/volume-a6379fd6-1468-4eff-945c-6ffe74897b05.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1097.168829] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdfe1b01-9472-4b69-aa33-f7d32c790f8c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.190044] env[68217]: DEBUG oslo_vmware.api [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1097.190044] env[68217]: value = "task-2961980" [ 1097.190044] env[68217]: _type = "Task" [ 1097.190044] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.198876] env[68217]: DEBUG oslo_vmware.api [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961980, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.259165] env[68217]: DEBUG nova.network.neutron [-] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.506240] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.794s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1097.534587] env[68217]: INFO nova.scheduler.client.report [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Deleted allocations for instance 776798bf-1ad4-4acb-ac58-cacc5493e1c7 [ 1097.607133] env[68217]: DEBUG nova.network.neutron [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1097.671086] env[68217]: INFO nova.compute.manager [-] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Took 1.47 seconds to deallocate network for instance. [ 1097.704831] env[68217]: DEBUG oslo_vmware.api [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961980, 'name': ReconfigVM_Task, 'duration_secs': 0.373349} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.705315] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Reconfigured VM instance instance-00000067 to attach disk [datastore2] volume-a6379fd6-1468-4eff-945c-6ffe74897b05/volume-a6379fd6-1468-4eff-945c-6ffe74897b05.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1097.712514] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-406ce278-81ac-45d2-b381-a06f4569b4e9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.736375] env[68217]: DEBUG oslo_vmware.api [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1097.736375] env[68217]: value = "task-2961984" [ 1097.736375] env[68217]: _type = "Task" [ 1097.736375] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1097.744784] env[68217]: DEBUG oslo_vmware.api [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961984, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1097.764190] env[68217]: INFO nova.compute.manager [-] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Took 1.41 seconds to deallocate network for instance. [ 1098.045207] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b65c7fee-3c76-4972-b0fd-17349bb762e3 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "776798bf-1ad4-4acb-ac58-cacc5493e1c7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.277s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1098.063969] env[68217]: DEBUG nova.network.neutron [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Updating instance_info_cache with network_info: [{"id": "3ee5d607-0533-4e05-9447-4840b4e48cdd", "address": "fa:16:3e:40:c3:41", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ee5d607-05", "ovs_interfaceid": "3ee5d607-0533-4e05-9447-4840b4e48cdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.180421] env[68217]: DEBUG oslo_concurrency.lockutils [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.180533] env[68217]: DEBUG oslo_concurrency.lockutils [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1098.180759] env[68217]: 
DEBUG nova.objects.instance [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lazy-loading 'resources' on Instance uuid 8f0d5766-005e-459d-b9f4-e46b435e43b8 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1098.248133] env[68217]: DEBUG oslo_vmware.api [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961984, 'name': ReconfigVM_Task, 'duration_secs': 0.138836} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1098.248687] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594389', 'volume_id': 'a6379fd6-1468-4eff-945c-6ffe74897b05', 'name': 'volume-a6379fd6-1468-4eff-945c-6ffe74897b05', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6b4dff91-254e-43cc-85cf-7de6214dcafd', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6379fd6-1468-4eff-945c-6ffe74897b05', 'serial': 'a6379fd6-1468-4eff-945c-6ffe74897b05'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1098.274047] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1098.569111] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1098.569111] env[68217]: DEBUG nova.compute.manager [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Instance network_info: |[{"id": "3ee5d607-0533-4e05-9447-4840b4e48cdd", "address": "fa:16:3e:40:c3:41", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": 
"nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ee5d607-05", "ovs_interfaceid": "3ee5d607-0533-4e05-9447-4840b4e48cdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1098.569445] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:c3:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '78340140-126f-4ef8-a340-debaa64da3e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3ee5d607-0533-4e05-9447-4840b4e48cdd', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1098.582429] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1098.583794] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1098.586954] env[68217]: DEBUG nova.compute.manager [req-6b09a700-e072-4d01-a956-2314c12e489a req-c651cd27-8bd1-4c81-b7a7-575b5bfe3d55 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Received event network-changed-3ee5d607-0533-4e05-9447-4840b4e48cdd {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1098.587185] env[68217]: DEBUG nova.compute.manager [req-6b09a700-e072-4d01-a956-2314c12e489a req-c651cd27-8bd1-4c81-b7a7-575b5bfe3d55 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Refreshing instance network info cache due to event network-changed-3ee5d607-0533-4e05-9447-4840b4e48cdd. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1098.587979] env[68217]: DEBUG oslo_concurrency.lockutils [req-6b09a700-e072-4d01-a956-2314c12e489a req-c651cd27-8bd1-4c81-b7a7-575b5bfe3d55 service nova] Acquiring lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.588112] env[68217]: DEBUG oslo_concurrency.lockutils [req-6b09a700-e072-4d01-a956-2314c12e489a req-c651cd27-8bd1-4c81-b7a7-575b5bfe3d55 service nova] Acquired lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1098.588365] env[68217]: DEBUG nova.network.neutron [req-6b09a700-e072-4d01-a956-2314c12e489a req-c651cd27-8bd1-4c81-b7a7-575b5bfe3d55 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Refreshing network info cache for port 3ee5d607-0533-4e05-9447-4840b4e48cdd {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1098.589340] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cc7d43fc-36b4-42c6-9860-3c0d2d95337e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.612523] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1098.612523] env[68217]: value = "task-2961985" [ 1098.612523] env[68217]: _type = "Task" [ 1098.612523] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1098.620905] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961985, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1098.947150] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242051f0-7f88-4c0d-ab27-4331961d1e33 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.955459] env[68217]: DEBUG nova.network.neutron [req-6b09a700-e072-4d01-a956-2314c12e489a req-c651cd27-8bd1-4c81-b7a7-575b5bfe3d55 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Updated VIF entry in instance network info cache for port 3ee5d607-0533-4e05-9447-4840b4e48cdd. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1098.955803] env[68217]: DEBUG nova.network.neutron [req-6b09a700-e072-4d01-a956-2314c12e489a req-c651cd27-8bd1-4c81-b7a7-575b5bfe3d55 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Updating instance_info_cache with network_info: [{"id": "3ee5d607-0533-4e05-9447-4840b4e48cdd", "address": "fa:16:3e:40:c3:41", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ee5d607-05", "ovs_interfaceid": "3ee5d607-0533-4e05-9447-4840b4e48cdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.960706] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8031358d-e838-46a4-98af-7c0d369c467a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.991967] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0caee5d8-923b-4ad6-b09e-e4c93acdaff1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.005154] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2903506f-0236-46c3-ae0f-0a322b170bf5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.019924] env[68217]: DEBUG nova.compute.provider_tree [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1099.123350] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2961985, 'name': CreateVM_Task, 'duration_secs': 0.441553} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.123604] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1099.124319] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.124488] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.124794] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1099.125096] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c356cc1d-2dd0-4ce8-8175-6a0e083d4c0f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.129589] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1099.129589] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ad8c29-ea9d-4562-fa8d-9acdb6c547f0" [ 1099.129589] env[68217]: _type = "Task" [ 1099.129589] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.137360] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ad8c29-ea9d-4562-fa8d-9acdb6c547f0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.319929] env[68217]: DEBUG nova.objects.instance [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lazy-loading 'flavor' on Instance uuid 6b4dff91-254e-43cc-85cf-7de6214dcafd {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1099.463330] env[68217]: DEBUG oslo_concurrency.lockutils [req-6b09a700-e072-4d01-a956-2314c12e489a req-c651cd27-8bd1-4c81-b7a7-575b5bfe3d55 service nova] Releasing lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.463610] env[68217]: DEBUG nova.compute.manager [req-6b09a700-e072-4d01-a956-2314c12e489a req-c651cd27-8bd1-4c81-b7a7-575b5bfe3d55 service nova] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Received event network-vif-deleted-fb83e739-088d-4c2b-a569-56a62ab234aa {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1099.466497] env[68217]: DEBUG nova.compute.manager [req-6b09a700-e072-4d01-a956-2314c12e489a req-c651cd27-8bd1-4c81-b7a7-575b5bfe3d55 service nova] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Received event network-vif-deleted-3c55e109-8e58-427e-97e6-51b5fe90081f {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1099.525598] env[68217]: DEBUG nova.scheduler.client.report [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1099.528971] env[68217]: INFO nova.compute.manager [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Rescuing [ 1099.531425] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.531425] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.531425] env[68217]: DEBUG nova.network.neutron [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 
6b4dff91-254e-43cc-85cf-7de6214dcafd] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1099.641318] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ad8c29-ea9d-4562-fa8d-9acdb6c547f0, 'name': SearchDatastore_Task, 'duration_secs': 0.027245} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1099.641618] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1099.641852] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1099.642104] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.642255] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1099.642436] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1099.642697] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-89347191-9de0-47b2-b865-c57d14440500 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.656142] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1099.656366] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1099.657355] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1f76b46-579f-44bc-b71c-6859c9d3958f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.665842] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1099.665842] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d2cb52-fa52-9db9-16c4-3f834c6de59a" [ 1099.665842] env[68217]: _type = "Task" [ 1099.665842] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.672873] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d2cb52-fa52-9db9-16c4-3f834c6de59a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.825414] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ae0c851b-2ee1-4aee-bf85-07dfc650fd20 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "6b4dff91-254e-43cc-85cf-7de6214dcafd" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.810s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.031806] env[68217]: DEBUG oslo_concurrency.lockutils [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.851s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.034570] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.761s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.034958] env[68217]: DEBUG nova.objects.instance [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lazy-loading 'resources' on Instance uuid 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1100.063331] env[68217]: INFO nova.scheduler.client.report [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Deleted allocations for instance 8f0d5766-005e-459d-b9f4-e46b435e43b8 [ 1100.179047] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: 
{'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d2cb52-fa52-9db9-16c4-3f834c6de59a, 'name': SearchDatastore_Task, 'duration_secs': 0.021004} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.179386] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5caf7719-8279-4267-955b-de8913e42e5f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.186530] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1100.186530] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d7cdfa-109e-d268-5876-672ab9232d00" [ 1100.186530] env[68217]: _type = "Task" [ 1100.186530] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.195638] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d7cdfa-109e-d268-5876-672ab9232d00, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.462699] env[68217]: DEBUG nova.network.neutron [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Updating instance_info_cache with network_info: [{"id": "753c250a-569f-42f8-a9e7-fed02079c841", "address": "fa:16:3e:ee:87:7a", "network": {"id": "8ad06dc6-c950-487f-b4ea-fe9364f8548d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-897727038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1424003d74424a9e84d15879f2e634e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap753c250a-56", "ovs_interfaceid": "753c250a-569f-42f8-a9e7-fed02079c841", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.572224] env[68217]: DEBUG oslo_concurrency.lockutils [None req-347ad305-6aef-48b3-a1fb-8aac2585cc0e tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "8f0d5766-005e-459d-b9f4-e46b435e43b8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 
5.992s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1100.696712] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d7cdfa-109e-d268-5876-672ab9232d00, 'name': SearchDatastore_Task, 'duration_secs': 0.015414} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.699428] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1100.699674] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 6789dd7d-d042-4c29-a963-2b4b982d5b43/6789dd7d-d042-4c29-a963-2b4b982d5b43.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1100.700101] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9e7a2e1c-e2a2-4e85-be03-fd4c5089cd46 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.707726] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1100.707726] env[68217]: value = "task-2961987" [ 1100.707726] env[68217]: _type = "Task" [ 1100.707726] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.716380] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961987, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.744119] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dfb5ab1-0ee8-4452-af45-70137f8ab6f7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.752328] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "759149be-178f-4238-b9c3-c316d060d6be" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1100.752616] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "759149be-178f-4238-b9c3-c316d060d6be" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1100.757302] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-820727e3-adf8-45c3-b1ea-866f3248d3f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.791759] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f15b2b2-4d88-45a6-8231-7a4eb33a7837 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.800197] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225211e2-db74-4f0a-86ef-ba1616a29c5b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.814766] env[68217]: DEBUG nova.compute.provider_tree [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.966088] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Releasing lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1101.217276] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961987, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.496917} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.217572] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 6789dd7d-d042-4c29-a963-2b4b982d5b43/6789dd7d-d042-4c29-a963-2b4b982d5b43.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1101.217771] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1101.218032] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-09285917-ca85-4057-b650-0d33e50761c2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.225176] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1101.225176] env[68217]: value = "task-2961988" [ 1101.225176] env[68217]: _type = "Task" [ 1101.225176] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.232775] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961988, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.259644] env[68217]: DEBUG nova.compute.manager [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1101.318485] env[68217]: DEBUG nova.scheduler.client.report [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1101.628068] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Volume attach. 
Driver type: vmdk {{(pid=68217) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1101.628370] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594391', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'name': 'volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a4dcc7fb-83e4-4bb9-9c98-9569daee1435', 'attached_at': '', 'detached_at': '', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'serial': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1101.629286] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40755607-86a4-4040-81fb-30369b7ea33d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.648212] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e87f8b4-e384-49bd-8307-e2c93e6d7073 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.673994] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d/volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1101.674144] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e75dbf4-9486-49d4-a5e2-44ad7c39403c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.692950] env[68217]: DEBUG oslo_vmware.api [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1101.692950] env[68217]: value = "task-2961989" [ 1101.692950] env[68217]: _type = "Task" [ 1101.692950] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.702192] env[68217]: DEBUG oslo_vmware.api [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961989, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.734738] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961988, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064323} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.735057] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1101.735870] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1eaad49c-650d-458c-b36f-1f393e5134e7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.760890] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Reconfiguring VM instance instance-00000070 to attach disk [datastore2] 6789dd7d-d042-4c29-a963-2b4b982d5b43/6789dd7d-d042-4c29-a963-2b4b982d5b43.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1101.761238] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e422c88f-09da-4443-b867-3a24b9b10729 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.790762] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1101.790762] env[68217]: value = "task-2961990" [ 1101.790762] env[68217]: _type = "Task" [ 1101.790762] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.796344] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1101.799676] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961990, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.823551] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.789s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1101.825789] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.029s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1101.827627] env[68217]: INFO nova.compute.claims [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1101.843305] env[68217]: INFO nova.scheduler.client.report [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Deleted allocations for instance 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9 [ 1102.210078] env[68217]: DEBUG oslo_vmware.api [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961989, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.302604] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961990, 'name': ReconfigVM_Task, 'duration_secs': 0.469994} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.303105] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Reconfigured VM instance instance-00000070 to attach disk [datastore2] 6789dd7d-d042-4c29-a963-2b4b982d5b43/6789dd7d-d042-4c29-a963-2b4b982d5b43.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1102.303638] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a81f16c1-0902-4588-9ed3-3943d67624bc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.311691] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1102.311691] env[68217]: value = "task-2961991" [ 1102.311691] env[68217]: _type = "Task" [ 1102.311691] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.323016] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961991, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.355849] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ae6c3f16-f29b-4f78-88e4-3b5a620d7137 tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.621s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1102.512035] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1102.512992] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-10f37841-3071-4485-9501-e84287ffd50d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.520453] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1102.520453] env[68217]: value = "task-2961992" [ 1102.520453] env[68217]: _type = "Task" [ 1102.520453] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.531523] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961992, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.705268] env[68217]: DEBUG oslo_vmware.api [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961989, 'name': ReconfigVM_Task, 'duration_secs': 0.567992} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.705697] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Reconfigured VM instance instance-00000068 to attach disk [datastore2] volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d/volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1102.711894] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fc263314-6977-48df-bf58-2a6a1e8ddc54 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.738322] env[68217]: DEBUG oslo_vmware.api [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1102.738322] env[68217]: value = "task-2961993" [ 1102.738322] env[68217]: _type = "Task" [ 1102.738322] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.749628] env[68217]: DEBUG oslo_vmware.api [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961993, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.825048] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961991, 'name': Rename_Task, 'duration_secs': 0.142913} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.825048] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1102.826635] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-323bf056-c966-4f27-8939-2608fa009fd7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.845939] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1102.845939] env[68217]: value = "task-2961994" [ 1102.845939] env[68217]: _type = "Task" [ 1102.845939] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.859420] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961994, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.041662] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961992, 'name': PowerOffVM_Task, 'duration_secs': 0.274671} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.041662] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1103.041662] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23b82ba2-33cb-499a-9f7f-cbf8bf6f266a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.068210] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1784f2e-efba-473f-b1ed-06832ad2ad47 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.113300] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1103.113610] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d818d584-8af7-4365-96ab-2624b9905e2c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.125325] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1103.125325] env[68217]: value = "task-2961995" [ 1103.125325] env[68217]: _type = "Task" [ 1103.125325] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.131348] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b007bc31-61b0-41a5-afba-dc7fc5e10692 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.145180] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "309aa787-aa7d-4508-bf90-499958747c46" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.145862] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "309aa787-aa7d-4508-bf90-499958747c46" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.147245] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] VM already powered off {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1103.147245] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1103.147681] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1103.147798] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1103.147938] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1103.152020] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9ef1507-eff2-482f-bf0b-87647914d950 {{(pid=68217) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.153548] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f72455-d8ac-46ef-96d3-ac8d6db9d99b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.194514] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e893d849-9b01-464c-a9e3-79e356fa90f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.197970] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1103.198170] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1103.198911] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "1d8973e7-1da3-4c17-9516-007b2356854f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.199142] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "1d8973e7-1da3-4c17-9516-007b2356854f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.200487] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76a446f1-1af7-4cb7-a6fa-6689dbf72860 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.210408] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2527d458-1ca3-4e19-a290-4fbc6defa296 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.214604] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1103.214604] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52309200-c1b2-a093-4676-29fd2347b163" [ 1103.214604] env[68217]: _type = "Task" [ 1103.214604] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.226811] env[68217]: DEBUG nova.compute.provider_tree [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1103.234312] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52309200-c1b2-a093-4676-29fd2347b163, 'name': SearchDatastore_Task, 'duration_secs': 0.021067} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.234859] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43f3db12-71c4-4b9e-8825-40a6f2f5712f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.243890] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1103.243890] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527745b7-ae46-4449-c4c6-855bafefba96" [ 1103.243890] env[68217]: _type = "Task" [ 1103.243890] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.247383] env[68217]: DEBUG oslo_vmware.api [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961993, 'name': ReconfigVM_Task, 'duration_secs': 0.158687} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.250159] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594391', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'name': 'volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a4dcc7fb-83e4-4bb9-9c98-9569daee1435', 'attached_at': '', 'detached_at': '', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'serial': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1103.256488] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527745b7-ae46-4449-c4c6-855bafefba96, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.357138] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961994, 'name': PowerOnVM_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.585031] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Acquiring lock "84f7ae5d-abbd-4102-b4a9-5468e0edefc6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1103.585031] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Lock "84f7ae5d-abbd-4102-b4a9-5468e0edefc6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1103.651806] env[68217]: DEBUG nova.compute.manager [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1103.703918] env[68217]: DEBUG nova.compute.manager [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1103.730607] env[68217]: DEBUG nova.scheduler.client.report [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1103.757242] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527745b7-ae46-4449-c4c6-855bafefba96, 'name': SearchDatastore_Task, 'duration_secs': 0.025019} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.759474] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1103.759761] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 6b4dff91-254e-43cc-85cf-7de6214dcafd/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk. {{(pid=68217) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1103.760246] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-65a5376b-16f4-4193-bec4-f87943b48e6b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.771418] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1103.771418] env[68217]: value = "task-2961996" [ 1103.771418] env[68217]: _type = "Task" [ 1103.771418] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.779442] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961996, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.859415] env[68217]: DEBUG oslo_vmware.api [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2961994, 'name': PowerOnVM_Task, 'duration_secs': 0.719036} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1103.859748] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1103.859929] env[68217]: INFO nova.compute.manager [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Took 7.97 seconds to spawn the instance on the hypervisor. 
[ 1103.860120] env[68217]: DEBUG nova.compute.manager [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1103.860927] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3774cc79-9a5d-4d24-aed3-52802fa18f36 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.088072] env[68217]: DEBUG nova.compute.manager [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1104.178016] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.225395] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.236426] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.411s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.237032] env[68217]: DEBUG nova.compute.manager [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1104.240289] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.062s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1104.241676] env[68217]: INFO nova.compute.claims [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1104.282474] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961996, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.300216] env[68217]: DEBUG nova.objects.instance [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lazy-loading 'flavor' on Instance uuid a4dcc7fb-83e4-4bb9-9c98-9569daee1435 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1104.380041] env[68217]: INFO nova.compute.manager [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Took 12.87 seconds to build instance. [ 1104.609079] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1104.746270] env[68217]: DEBUG nova.compute.utils [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1104.755123] env[68217]: DEBUG nova.compute.manager [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1104.755123] env[68217]: DEBUG nova.network.neutron [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1104.771102] env[68217]: DEBUG nova.compute.manager [req-7982cfdf-c56c-4e4e-ac26-c9c4413053a8 req-c418b256-d700-43bb-962e-d006f361c4ba service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Received event network-changed-513d21ef-f0b3-47f7-96ae-f01c23ac3ef1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1104.771102] env[68217]: DEBUG nova.compute.manager [req-7982cfdf-c56c-4e4e-ac26-c9c4413053a8 req-c418b256-d700-43bb-962e-d006f361c4ba service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Refreshing instance network info cache due to event network-changed-513d21ef-f0b3-47f7-96ae-f01c23ac3ef1. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1104.771102] env[68217]: DEBUG oslo_concurrency.lockutils [req-7982cfdf-c56c-4e4e-ac26-c9c4413053a8 req-c418b256-d700-43bb-962e-d006f361c4ba service nova] Acquiring lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1104.771102] env[68217]: DEBUG oslo_concurrency.lockutils [req-7982cfdf-c56c-4e4e-ac26-c9c4413053a8 req-c418b256-d700-43bb-962e-d006f361c4ba service nova] Acquired lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1104.771102] env[68217]: DEBUG nova.network.neutron [req-7982cfdf-c56c-4e4e-ac26-c9c4413053a8 req-c418b256-d700-43bb-962e-d006f361c4ba service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Refreshing network info cache for port 513d21ef-f0b3-47f7-96ae-f01c23ac3ef1 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1104.790890] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961996, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.805109} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.791492] env[68217]: INFO nova.virt.vmwareapi.ds_util [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 6b4dff91-254e-43cc-85cf-7de6214dcafd/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk. 
[ 1104.793217] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ab3899-aeb7-46fc-b22e-830386c044f7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.825462] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Reconfiguring VM instance instance-00000067 to attach disk [datastore2] 6b4dff91-254e-43cc-85cf-7de6214dcafd/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1104.826117] env[68217]: DEBUG oslo_concurrency.lockutils [None req-54b442d5-5393-4d2e-add0-92f092f75157 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.818s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.828410] env[68217]: DEBUG nova.policy [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd229078579a54e6991e85bc49326c0b6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3068bf39ee943f1bdf378f8b2a5c360', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1104.830483] env[68217]: INFO nova.compute.manager [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Rebuilding instance [ 1104.832987] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be58ebc6-5a01-471d-b125-efa920dcb94e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.855967] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1104.855967] env[68217]: value = "task-2961997" [ 1104.855967] env[68217]: _type = "Task" [ 1104.855967] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.865010] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961997, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.884185] env[68217]: DEBUG oslo_concurrency.lockutils [None req-85d62cd5-ad0f-4ff9-a206-e4635c397bd2 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "6789dd7d-d042-4c29-a963-2b4b982d5b43" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.390s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1104.899426] env[68217]: DEBUG nova.compute.manager [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1104.900378] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a665d46f-4089-43b5-907b-6f0e3db3e2fe {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.254323] env[68217]: DEBUG nova.compute.manager [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1105.313365] env[68217]: DEBUG nova.network.neutron [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Successfully created port: ccbc8261-8d58-4e71-9a59-ac46dac31267 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1105.365263] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961997, 'name': ReconfigVM_Task, 'duration_secs': 0.340552} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.365579] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Reconfigured VM instance instance-00000067 to attach disk [datastore2] 6b4dff91-254e-43cc-85cf-7de6214dcafd/575ba628-84b6-4b0c-98ba-305166627d10-rescue.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1105.366574] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d2366a-e187-427f-a374-f286b41a046e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.407702] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-61e12a45-58ce-4942-afdf-8ac72ddade82 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.428276] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1105.428276] env[68217]: value = "task-2961998" [ 1105.428276] env[68217]: _type = "Task" [ 1105.428276] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.438316] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961998, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.519556] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2eddaee-c152-4690-809f-d6eb24c4beb9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.529212] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58cdb616-ed47-45de-b098-e4c541c75b1a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.565229] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c476208f-8b6e-4467-a6d0-9a7de1968294 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.573420] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baf5aa1d-2bed-4401-a4b8-296db681521a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.587358] env[68217]: DEBUG nova.compute.provider_tree [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1105.687142] env[68217]: DEBUG nova.network.neutron [req-7982cfdf-c56c-4e4e-ac26-c9c4413053a8 req-c418b256-d700-43bb-962e-d006f361c4ba service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Updated VIF entry in instance network info cache for port 513d21ef-f0b3-47f7-96ae-f01c23ac3ef1. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1105.687517] env[68217]: DEBUG nova.network.neutron [req-7982cfdf-c56c-4e4e-ac26-c9c4413053a8 req-c418b256-d700-43bb-962e-d006f361c4ba service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Updating instance_info_cache with network_info: [{"id": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "address": "fa:16:3e:41:6d:56", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap513d21ef-f0", "ovs_interfaceid": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.922700] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1105.922879] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1641fb23-3908-45ac-91da-32703d51ea21 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.933864] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1105.933864] env[68217]: value = "task-2961999" [ 1105.933864] env[68217]: _type = "Task" [ 1105.933864] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.937225] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2961998, 'name': ReconfigVM_Task, 'duration_secs': 0.164407} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.940822] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1105.941104] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c2f708b6-f7d0-473d-ba6e-b85eacb72856 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.947326] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961999, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.948513] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1105.948513] env[68217]: value = "task-2962000" [ 1105.948513] env[68217]: _type = "Task" [ 1105.948513] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1105.956043] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962000, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.091125] env[68217]: DEBUG nova.scheduler.client.report [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1106.190449] env[68217]: DEBUG oslo_concurrency.lockutils [req-7982cfdf-c56c-4e4e-ac26-c9c4413053a8 req-c418b256-d700-43bb-962e-d006f361c4ba service nova] Releasing lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1106.268018] env[68217]: DEBUG nova.compute.manager [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1106.297234] env[68217]: DEBUG nova.virt.hardware [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1106.297520] env[68217]: DEBUG nova.virt.hardware [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1106.297682] env[68217]: DEBUG nova.virt.hardware [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1106.297873] env[68217]: DEBUG nova.virt.hardware [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1106.298122] env[68217]: DEBUG nova.virt.hardware [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1106.298313] env[68217]: DEBUG nova.virt.hardware [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1106.298546] env[68217]: DEBUG nova.virt.hardware [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1106.298716] env[68217]: DEBUG nova.virt.hardware [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1106.298878] env[68217]: DEBUG 
nova.virt.hardware [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1106.299061] env[68217]: DEBUG nova.virt.hardware [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1106.299500] env[68217]: DEBUG nova.virt.hardware [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1106.300426] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93add85f-2814-435f-b0c6-8733db91d51a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.308904] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82474b51-3453-471d-bd52-abdba288aa0d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.448226] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2961999, 'name': PowerOffVM_Task, 'duration_secs': 0.349637} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.448606] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1106.462130] env[68217]: DEBUG oslo_vmware.api [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962000, 'name': PowerOnVM_Task, 'duration_secs': 0.434507} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1106.462420] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1106.465078] env[68217]: DEBUG nova.compute.manager [None req-c8631739-2690-4cf0-bd90-b264a92fbd8a tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1106.465867] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c753712d-85d6-4203-8376-2c9db2914c83 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.506467] env[68217]: INFO nova.compute.manager [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Detaching volume 1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d [ 1106.537909] env[68217]: INFO nova.virt.block_device [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Attempting to driver detach volume 1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d from mountpoint /dev/sdb [ 1106.538194] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Volume detach. 
Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1106.538395] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594391', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'name': 'volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a4dcc7fb-83e4-4bb9-9c98-9569daee1435', 'attached_at': '', 'detached_at': '', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'serial': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1106.539424] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6134209f-846a-4a69-9f81-4d8fbf49f16c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.561233] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd18626-6600-4f69-bf77-96c9f6e6cf60 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.569910] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95dcd665-3268-452f-bcf6-f85761a05a30 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.592397] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d9ff561-ed19-4c46-bb13-369c1425e714 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.595481] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.355s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.595967] env[68217]: DEBUG nova.compute.manager [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1106.598674] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.373s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.600050] env[68217]: INFO nova.compute.claims [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1106.617657] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] The volume has not been displaced from its original location: [datastore2] volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d/volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d.vmdk. No consolidation needed. {{(pid=68217) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1106.623049] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Reconfiguring VM instance instance-00000068 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1106.623415] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-559485f0-a1f8-46bb-9d63-e75621a1d501 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.645286] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1106.645286] env[68217]: value = "task-2962001" [ 1106.645286] env[68217]: _type = "Task" [ 1106.645286] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.654536] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962001, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.800059] env[68217]: DEBUG nova.compute.manager [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Received event network-changed-3ee5d607-0533-4e05-9447-4840b4e48cdd {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1106.800245] env[68217]: DEBUG nova.compute.manager [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Refreshing instance network info cache due to event network-changed-3ee5d607-0533-4e05-9447-4840b4e48cdd. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1106.800453] env[68217]: DEBUG oslo_concurrency.lockutils [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] Acquiring lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1106.800594] env[68217]: DEBUG oslo_concurrency.lockutils [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] Acquired lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1106.800753] env[68217]: DEBUG nova.network.neutron [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Refreshing network info cache for port 3ee5d607-0533-4e05-9447-4840b4e48cdd {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1106.919172] env[68217]: DEBUG nova.compute.manager [req-17338fa4-3e98-463b-867d-9c313c9c0507 req-4dc1db8b-05aa-49f0-ad20-942988df46aa service nova] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Received event network-vif-plugged-ccbc8261-8d58-4e71-9a59-ac46dac31267 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1106.919400] env[68217]: DEBUG oslo_concurrency.lockutils [req-17338fa4-3e98-463b-867d-9c313c9c0507 req-4dc1db8b-05aa-49f0-ad20-942988df46aa service nova] Acquiring lock "759149be-178f-4238-b9c3-c316d060d6be-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1106.919641] env[68217]: DEBUG oslo_concurrency.lockutils [req-17338fa4-3e98-463b-867d-9c313c9c0507 req-4dc1db8b-05aa-49f0-ad20-942988df46aa service nova] Lock "759149be-178f-4238-b9c3-c316d060d6be-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1106.919815] env[68217]: DEBUG oslo_concurrency.lockutils [req-17338fa4-3e98-463b-867d-9c313c9c0507 req-4dc1db8b-05aa-49f0-ad20-942988df46aa service nova] Lock "759149be-178f-4238-b9c3-c316d060d6be-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1106.919988] env[68217]: DEBUG nova.compute.manager [req-17338fa4-3e98-463b-867d-9c313c9c0507 req-4dc1db8b-05aa-49f0-ad20-942988df46aa service nova] [instance: 759149be-178f-4238-b9c3-c316d060d6be] No waiting events found dispatching network-vif-plugged-ccbc8261-8d58-4e71-9a59-ac46dac31267 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1106.920245] env[68217]: WARNING nova.compute.manager [req-17338fa4-3e98-463b-867d-9c313c9c0507 req-4dc1db8b-05aa-49f0-ad20-942988df46aa service nova] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Received unexpected event network-vif-plugged-ccbc8261-8d58-4e71-9a59-ac46dac31267 for instance with vm_state building and task_state spawning. 
[ 1107.004903] env[68217]: DEBUG nova.network.neutron [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Successfully updated port: ccbc8261-8d58-4e71-9a59-ac46dac31267 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1107.107312] env[68217]: DEBUG nova.compute.utils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1107.108693] env[68217]: DEBUG nova.compute.manager [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1107.108867] env[68217]: DEBUG nova.network.neutron [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1107.144932] env[68217]: DEBUG nova.policy [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5bb6b97339c44b8ca35e57e67e1462d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fbbb8a9e27bf4e00ac2a97750661ddbb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1107.155835] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962001, 'name': ReconfigVM_Task, 'duration_secs': 0.449633} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.156123] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Reconfigured VM instance instance-00000068 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1107.160803] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-45a20b32-8530-46de-b965-de029dfa9f3f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.176688] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1107.176688] env[68217]: value = "task-2962002" [ 1107.176688] env[68217]: _type = "Task" [ 1107.176688] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.186679] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962002, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.509900] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "refresh_cache-759149be-178f-4238-b9c3-c316d060d6be" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.509900] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquired lock "refresh_cache-759149be-178f-4238-b9c3-c316d060d6be" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1107.509900] env[68217]: DEBUG nova.network.neutron [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1107.579839] env[68217]: DEBUG nova.network.neutron [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Successfully created port: 626716af-4590-4f7f-a441-03ca790d82d4 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1107.613042] env[68217]: DEBUG nova.compute.manager [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1107.687661] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962002, 'name': ReconfigVM_Task, 'duration_secs': 0.394975} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.690213] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594391', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'name': 'volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a4dcc7fb-83e4-4bb9-9c98-9569daee1435', 'attached_at': '', 'detached_at': '', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'serial': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1107.853391] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b4f5aa0-ee5d-4eef-b30a-3651c5530e02 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.862362] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dd6414e-ba9e-4d15-9043-ead63dee57d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.892678] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d569bfba-341e-4b0b-ac4d-f8f1002363bb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.900872] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63b3e15b-a964-4114-9837-ba65a331b8d4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.920046] env[68217]: DEBUG nova.compute.provider_tree [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1107.962176] env[68217]: DEBUG nova.network.neutron [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Updated VIF entry in instance network info cache for port 3ee5d607-0533-4e05-9447-4840b4e48cdd. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1107.962571] env[68217]: DEBUG nova.network.neutron [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Updating instance_info_cache with network_info: [{"id": "3ee5d607-0533-4e05-9447-4840b4e48cdd", "address": "fa:16:3e:40:c3:41", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ee5d607-05", "ovs_interfaceid": "3ee5d607-0533-4e05-9447-4840b4e48cdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.059446] env[68217]: DEBUG nova.network.neutron [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1108.094990] env[68217]: INFO nova.compute.manager [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Unrescuing [ 1108.095213] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.095365] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquired lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.095527] env[68217]: DEBUG nova.network.neutron [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1108.307450] env[68217]: DEBUG nova.network.neutron [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Updating instance_info_cache with network_info: [{"id": "ccbc8261-8d58-4e71-9a59-ac46dac31267", "address": "fa:16:3e:79:a8:c2", "network": {"id": "e7f56c12-ca87-40ad-b72d-955989c48237", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-417650994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3068bf39ee943f1bdf378f8b2a5c360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccbc8261-8d", "ovs_interfaceid": "ccbc8261-8d58-4e71-9a59-ac46dac31267", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.422616] env[68217]: DEBUG nova.scheduler.client.report [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1108.466010] env[68217]: DEBUG oslo_concurrency.lockutils [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] Releasing lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1108.466010] env[68217]: DEBUG nova.compute.manager [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Received event network-changed-3ee5d607-0533-4e05-9447-4840b4e48cdd {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1108.466196] env[68217]: DEBUG nova.compute.manager [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Refreshing instance network info cache due to event network-changed-3ee5d607-0533-4e05-9447-4840b4e48cdd. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1108.466303] env[68217]: DEBUG oslo_concurrency.lockutils [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] Acquiring lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.466491] env[68217]: DEBUG oslo_concurrency.lockutils [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] Acquired lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.466686] env[68217]: DEBUG nova.network.neutron [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Refreshing network info cache for port 3ee5d607-0533-4e05-9447-4840b4e48cdd {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1108.625121] env[68217]: DEBUG nova.compute.manager [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1108.653616] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1108.653895] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1108.654062] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1108.654243] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1108.654387] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1108.654533] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1108.654775] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1108.654934] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1108.655110] env[68217]: DEBUG 
nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1108.655275] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1108.655439] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1108.656311] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ab6f733-4cb8-4b1c-b2a9-2cc5b97f0706 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.665593] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2a6f741-1f0e-4531-9f12-d681c0e9e570 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.742826] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1108.742826] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c5088986-19fd-4b82-905d-23b0d4a3ae2e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.751797] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1108.751797] env[68217]: value = "task-2962003" [ 1108.751797] env[68217]: _type = "Task" [ 1108.751797] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.764881] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] VM already powered off {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1108.765161] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Volume detach. 
Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1108.765346] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594391', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'name': 'volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a4dcc7fb-83e4-4bb9-9c98-9569daee1435', 'attached_at': '', 'detached_at': '', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'serial': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1108.766193] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-174e459b-6281-4c53-a6d4-8d499c4ee3d0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.784698] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4615f47e-cd46-4db6-9be2-e3992c2e31cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.791079] env[68217]: WARNING nova.virt.vmwareapi.driver [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] The volume None does not exist!: nova.exception.DiskNotFound: Unable to find volume [ 1108.791345] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1108.792065] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0008addc-04f4-4434-9a18-59815e0896bd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.800540] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1108.800782] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4d311160-5ba9-4958-81f3-23c1d746e97f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.809969] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Releasing lock "refresh_cache-759149be-178f-4238-b9c3-c316d060d6be" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1108.810456] env[68217]: DEBUG nova.compute.manager 
[None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Instance network_info: |[{"id": "ccbc8261-8d58-4e71-9a59-ac46dac31267", "address": "fa:16:3e:79:a8:c2", "network": {"id": "e7f56c12-ca87-40ad-b72d-955989c48237", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-417650994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3068bf39ee943f1bdf378f8b2a5c360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccbc8261-8d", "ovs_interfaceid": "ccbc8261-8d58-4e71-9a59-ac46dac31267", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1108.811018] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:79:a8:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ccbc8261-8d58-4e71-9a59-ac46dac31267', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1108.823734] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1108.824087] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1108.824433] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f7df3430-8227-48ac-9ba9-d2073944d1d4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.843708] env[68217]: DEBUG nova.network.neutron [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Updating instance_info_cache with network_info: [{"id": "753c250a-569f-42f8-a9e7-fed02079c841", "address": "fa:16:3e:ee:87:7a", "network": {"id": "8ad06dc6-c950-487f-b4ea-fe9364f8548d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-897727038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1424003d74424a9e84d15879f2e634e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap753c250a-56", "ovs_interfaceid": "753c250a-569f-42f8-a9e7-fed02079c841", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.852028] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1108.852028] env[68217]: value = "task-2962005" [ 1108.852028] env[68217]: _type = "Task" [ 1108.852028] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.860523] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962005, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.869816] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1108.870087] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1108.870284] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleting the datastore file [datastore1] a4dcc7fb-83e4-4bb9-9c98-9569daee1435 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1108.870541] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4eac5e62-fb04-4e79-9672-78fe026b5b2b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.879255] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1108.879255] env[68217]: value = "task-2962006" [ 1108.879255] env[68217]: _type = "Task" [ 1108.879255] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.891515] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962006, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.928306] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.330s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1108.928986] env[68217]: DEBUG nova.compute.manager [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1108.932537] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.324s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1108.934355] env[68217]: INFO nova.compute.claims [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1108.951345] env[68217]: DEBUG nova.compute.manager [req-84542c05-83b8-4bf5-90e4-e28ebff72226 req-1355e6cd-5caf-45a1-8644-0daf53d3e383 service nova] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Received event network-changed-ccbc8261-8d58-4e71-9a59-ac46dac31267 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1108.951546] env[68217]: DEBUG nova.compute.manager [req-84542c05-83b8-4bf5-90e4-e28ebff72226 req-1355e6cd-5caf-45a1-8644-0daf53d3e383 service nova] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Refreshing instance network info cache due to event network-changed-ccbc8261-8d58-4e71-9a59-ac46dac31267. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1108.951869] env[68217]: DEBUG oslo_concurrency.lockutils [req-84542c05-83b8-4bf5-90e4-e28ebff72226 req-1355e6cd-5caf-45a1-8644-0daf53d3e383 service nova] Acquiring lock "refresh_cache-759149be-178f-4238-b9c3-c316d060d6be" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1108.951938] env[68217]: DEBUG oslo_concurrency.lockutils [req-84542c05-83b8-4bf5-90e4-e28ebff72226 req-1355e6cd-5caf-45a1-8644-0daf53d3e383 service nova] Acquired lock "refresh_cache-759149be-178f-4238-b9c3-c316d060d6be" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1108.952185] env[68217]: DEBUG nova.network.neutron [req-84542c05-83b8-4bf5-90e4-e28ebff72226 req-1355e6cd-5caf-45a1-8644-0daf53d3e383 service nova] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Refreshing network info cache for port ccbc8261-8d58-4e71-9a59-ac46dac31267 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1109.299488] env[68217]: DEBUG nova.network.neutron [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Updated VIF entry in instance network info cache for port 3ee5d607-0533-4e05-9447-4840b4e48cdd. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1109.299831] env[68217]: DEBUG nova.network.neutron [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Updating instance_info_cache with network_info: [{"id": "3ee5d607-0533-4e05-9447-4840b4e48cdd", "address": "fa:16:3e:40:c3:41", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ee5d607-05", "ovs_interfaceid": "3ee5d607-0533-4e05-9447-4840b4e48cdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.346096] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Releasing lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.346794] env[68217]: DEBUG nova.objects.instance [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lazy-loading 'flavor' on Instance uuid 6b4dff91-254e-43cc-85cf-7de6214dcafd {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1109.349274] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.349525] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.349698] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.349867] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68217) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1109.368767] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962005, 'name': CreateVM_Task, 'duration_secs': 0.328561} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.369528] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1109.370222] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.370442] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.370742] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1109.371238] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ba5e645-88ec-4611-9a97-711dc0cfc44d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.379733] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1109.379733] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e8d5db-d412-418e-ec4c-22a87a2acd0f" [ 1109.379733] env[68217]: _type = "Task" [ 1109.379733] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.391743] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962006, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.159697} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.396246] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1109.397371] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1109.397753] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1109.400577] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e8d5db-d412-418e-ec4c-22a87a2acd0f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.439250] env[68217]: DEBUG nova.compute.utils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1109.442760] env[68217]: DEBUG nova.compute.manager [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1109.444061] env[68217]: DEBUG nova.network.neutron [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1109.532987] env[68217]: DEBUG nova.policy [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5bb6b97339c44b8ca35e57e67e1462d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fbbb8a9e27bf4e00ac2a97750661ddbb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1109.802723] env[68217]: DEBUG oslo_concurrency.lockutils [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] Releasing lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.803634] env[68217]: DEBUG nova.compute.manager [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Received event network-changed-513d21ef-f0b3-47f7-96ae-f01c23ac3ef1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1109.803634] env[68217]: DEBUG nova.compute.manager [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Refreshing instance network info cache due to event network-changed-513d21ef-f0b3-47f7-96ae-f01c23ac3ef1. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1109.803634] env[68217]: DEBUG oslo_concurrency.lockutils [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] Acquiring lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.803875] env[68217]: DEBUG oslo_concurrency.lockutils [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] Acquired lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.803875] env[68217]: DEBUG nova.network.neutron [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Refreshing network info cache for port 513d21ef-f0b3-47f7-96ae-f01c23ac3ef1 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1109.854510] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1109.855391] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb9585b-634d-4c3f-99bf-a6e8a3524c68 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.883088] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1109.886136] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e623d9f-35ae-4d53-b564-4aaa08b4c1ea {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.894608] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e8d5db-d412-418e-ec4c-22a87a2acd0f, 'name': SearchDatastore_Task, 'duration_secs': 0.043501} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1109.895875] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1109.896121] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1109.896355] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1109.896550] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1109.896668] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1109.896973] env[68217]: DEBUG oslo_vmware.api [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1109.896973] env[68217]: value = "task-2962007" [ 1109.896973] env[68217]: _type = "Task" [ 1109.896973] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.897196] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-16e877ee-e7ee-487d-89ec-f7685ff26c3c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.903531] env[68217]: INFO nova.virt.block_device [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Booting with volume 1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d at /dev/sdb [ 1109.914844] env[68217]: DEBUG oslo_vmware.api [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962007, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.916496] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1109.916768] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1109.917578] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5548643a-fcfc-47a7-9b06-d9e52c3c5f05 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.924156] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1109.924156] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d1f814-1d15-0988-f59b-4a8b1d1387e4" [ 1109.924156] env[68217]: _type = "Task" [ 1109.924156] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1109.934313] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d1f814-1d15-0988-f59b-4a8b1d1387e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1109.941538] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-957d975a-37a2-4fe6-bd95-2c26b4903e9b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.943928] env[68217]: DEBUG nova.compute.manager [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1109.949600] env[68217]: DEBUG nova.network.neutron [req-84542c05-83b8-4bf5-90e4-e28ebff72226 req-1355e6cd-5caf-45a1-8644-0daf53d3e383 service nova] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Updated VIF entry in instance network info cache for port ccbc8261-8d58-4e71-9a59-ac46dac31267. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1109.949600] env[68217]: DEBUG nova.network.neutron [req-84542c05-83b8-4bf5-90e4-e28ebff72226 req-1355e6cd-5caf-45a1-8644-0daf53d3e383 service nova] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Updating instance_info_cache with network_info: [{"id": "ccbc8261-8d58-4e71-9a59-ac46dac31267", "address": "fa:16:3e:79:a8:c2", "network": {"id": "e7f56c12-ca87-40ad-b72d-955989c48237", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-417650994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3068bf39ee943f1bdf378f8b2a5c360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccbc8261-8d", "ovs_interfaceid": "ccbc8261-8d58-4e71-9a59-ac46dac31267", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.961729] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6de56b3f-d9a0-4b11-884f-ae16de9e796e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.977143] env[68217]: DEBUG nova.network.neutron [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Successfully updated port: 626716af-4590-4f7f-a441-03ca790d82d4 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1110.006904] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cf9fb9d9-255a-4d81-8f88-7695dcefe718 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.017636] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-238dc042-7df0-43c7-a6d3-eefbdd3c6371 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.062392] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-69ba9770-8eea-44ab-af83-f5614732cf3b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.069786] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e696bb-84a5-45a5-821c-c5ac268ee226 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.075877] env[68217]: DEBUG nova.network.neutron [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] 
[instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Successfully created port: 3066fae5-f17e-4a05-8512-540ca8da5bcf {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1110.087342] env[68217]: DEBUG nova.virt.block_device [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Updating existing volume attachment record: 5aae7315-a61c-42bc-8565-ea6d0876d77e {{(pid=68217) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1110.234858] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "815d1801-fa07-4466-850d-b1a36d630d46" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.235135] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "815d1801-fa07-4466-850d-b1a36d630d46" acquired by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.235339] env[68217]: INFO nova.compute.manager [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Shelving [ 1110.240014] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6be970fc-42ac-4c62-bc0e-4dc1073d2d40 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.249044] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78bf6cc3-9720-4535-963f-c36e300c5ba7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.287324] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1b8f6b5-71e1-4809-b132-4ec744d22e46 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.296365] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4be519-d526-4f48-8ee4-b39aec9eb1b5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.313349] env[68217]: DEBUG nova.compute.provider_tree [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1110.408601] env[68217]: DEBUG oslo_vmware.api [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962007, 'name': PowerOffVM_Task,
'duration_secs': 0.250183} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.408866] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1110.414055] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Reconfiguring VM instance instance-00000067 to detach disk 2002 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1110.414328] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a8cc1b67-e2e9-4b09-b870-18ba19d9d478 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.438523] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d1f814-1d15-0988-f59b-4a8b1d1387e4, 'name': SearchDatastore_Task, 'duration_secs': 0.018648} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.440362] env[68217]: DEBUG oslo_vmware.api [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1110.440362] env[68217]: value = "task-2962008" [ 1110.440362] env[68217]: _type = "Task" [ 1110.440362] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.440604] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdaecd28-cd48-4236-a9c4-12854a86645d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.454820] env[68217]: DEBUG oslo_concurrency.lockutils [req-84542c05-83b8-4bf5-90e4-e28ebff72226 req-1355e6cd-5caf-45a1-8644-0daf53d3e383 service nova] Releasing lock "refresh_cache-759149be-178f-4238-b9c3-c316d060d6be" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.455290] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1110.455290] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e526cf-2b1c-d939-47c9-6651cec576eb" [ 1110.455290] env[68217]: _type = "Task" [ 1110.455290] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.455865] env[68217]: DEBUG oslo_vmware.api [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962008, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.464764] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e526cf-2b1c-d939-47c9-6651cec576eb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.480725] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "refresh_cache-309aa787-aa7d-4508-bf90-499958747c46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.480897] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquired lock "refresh_cache-309aa787-aa7d-4508-bf90-499958747c46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1110.481292] env[68217]: DEBUG nova.network.neutron [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1110.763933] env[68217]: DEBUG nova.network.neutron [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Updated VIF entry in instance network info cache for port 513d21ef-f0b3-47f7-96ae-f01c23ac3ef1. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1110.764282] env[68217]: DEBUG nova.network.neutron [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Updating instance_info_cache with network_info: [{"id": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "address": "fa:16:3e:41:6d:56", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap513d21ef-f0", "ovs_interfaceid": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1110.816269] env[68217]: DEBUG nova.scheduler.client.report [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1110.955390] env[68217]: DEBUG oslo_vmware.api [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962008, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1110.958918] env[68217]: DEBUG nova.compute.manager [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1110.970346] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e526cf-2b1c-d939-47c9-6651cec576eb, 'name': SearchDatastore_Task, 'duration_secs': 0.014929} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1110.970609] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1110.970867] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 759149be-178f-4238-b9c3-c316d060d6be/759149be-178f-4238-b9c3-c316d060d6be.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1110.971169] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d80fb0d8-c0cf-4e0e-ae13-692098ef1766 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.981245] env[68217]: DEBUG nova.compute.manager [req-0335cb51-8827-40f0-9680-1a00e928569e req-a2c9c77b-30b6-45db-92ea-8a0c9b1dabf8 service nova] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Received event network-vif-plugged-626716af-4590-4f7f-a441-03ca790d82d4 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1110.981578] env[68217]: DEBUG oslo_concurrency.lockutils [req-0335cb51-8827-40f0-9680-1a00e928569e req-a2c9c77b-30b6-45db-92ea-8a0c9b1dabf8 service nova] Acquiring lock "309aa787-aa7d-4508-bf90-499958747c46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1110.982177] env[68217]: DEBUG oslo_concurrency.lockutils [req-0335cb51-8827-40f0-9680-1a00e928569e req-a2c9c77b-30b6-45db-92ea-8a0c9b1dabf8 service nova] Lock "309aa787-aa7d-4508-bf90-499958747c46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1110.982428] env[68217]: DEBUG oslo_concurrency.lockutils [req-0335cb51-8827-40f0-9680-1a00e928569e req-a2c9c77b-30b6-45db-92ea-8a0c9b1dabf8 service nova] Lock "309aa787-aa7d-4508-bf90-499958747c46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1110.982681] env[68217]: DEBUG nova.compute.manager [req-0335cb51-8827-40f0-9680-1a00e928569e req-a2c9c77b-30b6-45db-92ea-8a0c9b1dabf8 service nova] [instance:
309aa787-aa7d-4508-bf90-499958747c46] No waiting events found dispatching network-vif-plugged-626716af-4590-4f7f-a441-03ca790d82d4 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1110.982866] env[68217]: WARNING nova.compute.manager [req-0335cb51-8827-40f0-9680-1a00e928569e req-a2c9c77b-30b6-45db-92ea-8a0c9b1dabf8 service nova] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Received unexpected event network-vif-plugged-626716af-4590-4f7f-a441-03ca790d82d4 for instance with vm_state building and task_state spawning. [ 1110.983059] env[68217]: DEBUG nova.compute.manager [req-0335cb51-8827-40f0-9680-1a00e928569e req-a2c9c77b-30b6-45db-92ea-8a0c9b1dabf8 service nova] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Received event network-changed-626716af-4590-4f7f-a441-03ca790d82d4 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1110.983259] env[68217]: DEBUG nova.compute.manager [req-0335cb51-8827-40f0-9680-1a00e928569e req-a2c9c77b-30b6-45db-92ea-8a0c9b1dabf8 service nova] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Refreshing instance network info cache due to event network-changed-626716af-4590-4f7f-a441-03ca790d82d4. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1110.983418] env[68217]: DEBUG oslo_concurrency.lockutils [req-0335cb51-8827-40f0-9680-1a00e928569e req-a2c9c77b-30b6-45db-92ea-8a0c9b1dabf8 service nova] Acquiring lock "refresh_cache-309aa787-aa7d-4508-bf90-499958747c46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1110.986391] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1110.986610] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1110.986765] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1110.987031] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Flavor pref 0:0:0 
{{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1110.987098] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1110.987276] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1110.987490] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1110.987648] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1110.987810] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1110.987968] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1110.988188] env[68217]: DEBUG nova.virt.hardware [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1110.990592] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1110.990592] env[68217]: value = "task-2962009" [ 1110.990592] env[68217]: _type = "Task" [ 1110.990592] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1110.991357] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13f6606-f20a-4cd9-9a9b-42fd499a3b83 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.007913] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2626835f-737e-4169-81a7-1ac214027996 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.012687] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962009, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.025489] env[68217]: DEBUG nova.network.neutron [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1111.224418] env[68217]: DEBUG nova.network.neutron [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Updating instance_info_cache with network_info: [{"id": "626716af-4590-4f7f-a441-03ca790d82d4", "address": "fa:16:3e:52:e0:90", "network": {"id": "1d50c0a9-6adb-41a4-aaba-7f1b5d8738a0", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-222715654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fbbb8a9e27bf4e00ac2a97750661ddbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap626716af-45", "ovs_interfaceid": "626716af-4590-4f7f-a441-03ca790d82d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1111.244493] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1111.245133] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e055a0d5-680d-41b3-bfd4-2b89d4bfd4a1 {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.254245] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1111.254245] env[68217]: value = "task-2962010" [ 1111.254245] env[68217]: _type = "Task" [ 1111.254245] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.269949] env[68217]: DEBUG oslo_concurrency.lockutils [req-4a2d574b-bbe2-43d2-a836-cc45606d529a req-03b7bef1-f1fd-4b8e-846d-315aaaa3cefa service nova] Releasing lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1111.270428] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962010, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.321188] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.389s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.321802] env[68217]: DEBUG nova.compute.manager [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1111.324582] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.470s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.324780] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1111.324935] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68217) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1111.325857] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6125524-7974-4a78-9c74-d575bf2c966b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.337638] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a70af819-2908-4096-a61e-1225e3d6f7de {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.354864] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e4622c6-ddb8-4ac0-8400-beeb0be88fb6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.365280] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac25c89b-5d04-43f9-9965-04316905bf88 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.402901] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179599MB free_disk=153GB free_vcpus=48 pci_devices=None {{(pid=68217) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1111.403138] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1111.403292] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1111.456430] env[68217]: DEBUG oslo_vmware.api [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962008, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.504574] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962009, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.510852} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.505381] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 759149be-178f-4238-b9c3-c316d060d6be/759149be-178f-4238-b9c3-c316d060d6be.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1111.505652] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1111.506126] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8ac0157f-97e3-4fb0-aa72-9ac39982c72e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.514889] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1111.514889] env[68217]: value = "task-2962011" [ 1111.514889] env[68217]: _type = "Task" [ 1111.514889] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.528533] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962011, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.726866] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Releasing lock "refresh_cache-309aa787-aa7d-4508-bf90-499958747c46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1111.728111] env[68217]: DEBUG nova.compute.manager [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Instance network_info: |[{"id": "626716af-4590-4f7f-a441-03ca790d82d4", "address": "fa:16:3e:52:e0:90", "network": {"id": "1d50c0a9-6adb-41a4-aaba-7f1b5d8738a0", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-222715654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fbbb8a9e27bf4e00ac2a97750661ddbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap626716af-45", "ovs_interfaceid": "626716af-4590-4f7f-a441-03ca790d82d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1111.728111] env[68217]: DEBUG oslo_concurrency.lockutils [req-0335cb51-8827-40f0-9680-1a00e928569e req-a2c9c77b-30b6-45db-92ea-8a0c9b1dabf8 service nova] Acquired lock "refresh_cache-309aa787-aa7d-4508-bf90-499958747c46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1111.728111] env[68217]: DEBUG nova.network.neutron [req-0335cb51-8827-40f0-9680-1a00e928569e req-a2c9c77b-30b6-45db-92ea-8a0c9b1dabf8 service nova] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Refreshing network info cache for port 626716af-4590-4f7f-a441-03ca790d82d4 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1111.729878] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:52:e0:90', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '93341b73-918c-4e9d-9c66-ca171a54b574', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '626716af-4590-4f7f-a441-03ca790d82d4', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1111.741588] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 
tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1111.743593] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1111.744039] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e395e0e5-f4d6-4ed8-a7f8-a32b6effd3b7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.776888] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1111.776888] env[68217]: value = "task-2962012" [ 1111.776888] env[68217]: _type = "Task" [ 1111.776888] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.780356] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962010, 'name': PowerOffVM_Task, 'duration_secs': 0.315826} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.784593] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1111.786085] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9d443f9-2c87-4d70-b80e-1410dd5b53c5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.794994] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962012, 'name': CreateVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1111.809850] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c386f992-520c-4666-bcb8-6386e2f8f696 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.831531] env[68217]: DEBUG nova.compute.utils [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1111.833695] env[68217]: DEBUG nova.compute.manager [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1111.833922] env[68217]: DEBUG nova.network.neutron [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1111.923734] env[68217]: DEBUG nova.policy [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '84a059060b494427902f1974901f84cf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '132f93394d0e4acca4a3201b049e8538', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1111.955600] env[68217]: DEBUG oslo_vmware.api [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962008, 'name': ReconfigVM_Task, 'duration_secs': 1.31941} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1111.955896] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Reconfigured VM instance instance-00000067 to detach disk 2002 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1111.956092] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1111.956415] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f1a0b8b8-da14-4515-828c-163f3fc9eaeb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1111.965553] env[68217]: DEBUG oslo_vmware.api [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1111.965553] env[68217]: value = "task-2962013" [ 1111.965553] env[68217]: _type = "Task" [ 1111.965553] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1111.972926] env[68217]: DEBUG oslo_vmware.api [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962013, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.027089] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962011, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.090116} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.027089] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1112.027089] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ab34ab-60a3-493e-b3c6-b947bbd43a3e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.051496] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Reconfiguring VM instance instance-00000071 to attach disk [datastore2] 759149be-178f-4238-b9c3-c316d060d6be/759149be-178f-4238-b9c3-c316d060d6be.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1112.051496] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-19b973b7-fb6a-4d34-ac73-b7b348b0002b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.067339] env[68217]: DEBUG nova.network.neutron [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Successfully updated port: 3066fae5-f17e-4a05-8512-540ca8da5bcf {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1112.075892] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1112.075892] env[68217]: value = "task-2962014" [ 1112.075892] env[68217]: _type = "Task" [ 1112.075892] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.085927] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962014, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.204359] env[68217]: DEBUG oslo_concurrency.lockutils [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.204620] env[68217]: DEBUG oslo_concurrency.lockutils [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.217514] env[68217]: DEBUG nova.virt.hardware [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1112.217739] env[68217]: DEBUG nova.virt.hardware [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1112.217891] env[68217]: DEBUG nova.virt.hardware [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1112.218080] env[68217]: DEBUG nova.virt.hardware [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1112.218231] env[68217]: DEBUG nova.virt.hardware [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1112.218374] env[68217]: DEBUG nova.virt.hardware [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1112.218575] env[68217]: DEBUG nova.virt.hardware [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1112.218729] env[68217]: DEBUG nova.virt.hardware [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1112.218890] env[68217]: DEBUG nova.virt.hardware [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1112.219072] env[68217]: DEBUG nova.virt.hardware [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1112.219286] env[68217]: DEBUG nova.virt.hardware [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1112.220336] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-402e2e24-0c64-4e64-bb77-2707a17b5a92 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.229938] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-558f9fe8-5f49-46c9-8c35-9a55e1cea2d8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.247063] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:23:9b:2c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '018f74db-1dcd-49e4-bd11-2ab20c34e986', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1112.254456] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1112.254981] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1112.255511] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-17a6299c-f6e6-45d9-a0c5-dc55d5f717d5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.276512] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f04b126c-b29c-4f1f-9bf3-a5f64c0c2a05 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "d28bcf16-b081-4dc8-a975-2acaed222e15" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1112.276766] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f04b126c-b29c-4f1f-9bf3-a5f64c0c2a05 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "d28bcf16-b081-4dc8-a975-2acaed222e15" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1112.277082] env[68217]: INFO nova.compute.manager [None req-f04b126c-b29c-4f1f-9bf3-a5f64c0c2a05 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Rebooting instance [ 1112.278712] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1112.278712] env[68217]: value = "task-2962015" [ 1112.278712] env[68217]: _type = "Task" [ 1112.278712] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.297754] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962015, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.298186] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962012, 'name': CreateVM_Task, 'duration_secs': 0.354957} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.298402] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1112.299291] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.299534] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1112.300047] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1112.300664] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4169eb6b-c3ea-44ff-9544-846a7ffa3fb9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.306722] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1112.306722] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d351a5-db2f-1533-3b51-4926cc40bc5a" [ 1112.306722] env[68217]: _type = "Task" [ 1112.306722] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.316787] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d351a5-db2f-1533-3b51-4926cc40bc5a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.321254] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1112.321679] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-5edd1af2-72c9-4c5f-b153-3564c8f5a1a5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.332034] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1112.332034] env[68217]: value = "task-2962016" [ 1112.332034] env[68217]: _type = "Task" [ 1112.332034] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.337624] env[68217]: DEBUG nova.compute.manager [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1112.348929] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962016, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.438334] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 7056fb29-2a2f-4275-a411-4d5f3fcb421f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1112.438561] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance a86015ea-fa6b-4cf8-9d79-273ffa02ec23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1112.438736] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance e8ed78ff-94dd-42d3-8a4d-8e58dc788e55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1112.438877] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1112.439070] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance d28bcf16-b081-4dc8-a975-2acaed222e15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1112.439232] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 6b4dff91-254e-43cc-85cf-7de6214dcafd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1112.439412] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance a4dcc7fb-83e4-4bb9-9c98-9569daee1435 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1112.439666] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 815d1801-fa07-4466-850d-b1a36d630d46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1112.439850] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1112.440009] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 89136574-575c-47da-928c-bd7a5dbb3a98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1112.440381] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 400058d8-f9ca-41b9-a671-b04b0511d074 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1112.440572] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 6789dd7d-d042-4c29-a963-2b4b982d5b43 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1112.440736] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 759149be-178f-4238-b9c3-c316d060d6be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1112.440913] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 309aa787-aa7d-4508-bf90-499958747c46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1112.441112] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 1d8973e7-1da3-4c17-9516-007b2356854f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1112.441346] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 84f7ae5d-abbd-4102-b4a9-5468e0edefc6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1112.441752] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Total usable vcpus: 48, total allocated vcpus: 16 {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1112.441952] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=3584MB phys_disk=200GB used_disk=16GB total_vcpus=48 used_vcpus=16 pci_stats=[] {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1112.460149] env[68217]: DEBUG nova.network.neutron [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Successfully created port: a0dbcb6d-8e89-4e49-a2d1-3dccf029a33a {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1112.475954] env[68217]: DEBUG oslo_vmware.api [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962013, 'name': PowerOnVM_Task, 'duration_secs': 0.420241} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.476338] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1112.476612] env[68217]: DEBUG nova.compute.manager [None req-ff13ce6c-25c6-4f26-8b90-b2092cf55b9b tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1112.478243] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8b225dd-6730-44e0-bcc7-b5d6373c04dc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.569694] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "refresh_cache-1d8973e7-1da3-4c17-9516-007b2356854f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.569842] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquired lock "refresh_cache-1d8973e7-1da3-4c17-9516-007b2356854f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1112.569996] env[68217]: DEBUG nova.network.neutron [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1112.592501] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962014, 'name': ReconfigVM_Task, 'duration_secs': 0.310822} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.592780] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Reconfigured VM instance instance-00000071 to attach disk [datastore2] 759149be-178f-4238-b9c3-c316d060d6be/759149be-178f-4238-b9c3-c316d060d6be.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1112.593433] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-2ddc685a-012b-4a26-93f9-b6b203f7990a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.601362] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1112.601362] env[68217]: value = "task-2962017" [ 1112.601362] env[68217]: _type = "Task" [ 1112.601362] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.612628] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962017, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.621552] env[68217]: DEBUG nova.network.neutron [req-0335cb51-8827-40f0-9680-1a00e928569e req-a2c9c77b-30b6-45db-92ea-8a0c9b1dabf8 service nova] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Updated VIF entry in instance network info cache for port 626716af-4590-4f7f-a441-03ca790d82d4. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1112.621902] env[68217]: DEBUG nova.network.neutron [req-0335cb51-8827-40f0-9680-1a00e928569e req-a2c9c77b-30b6-45db-92ea-8a0c9b1dabf8 service nova] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Updating instance_info_cache with network_info: [{"id": "626716af-4590-4f7f-a441-03ca790d82d4", "address": "fa:16:3e:52:e0:90", "network": {"id": "1d50c0a9-6adb-41a4-aaba-7f1b5d8738a0", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-222715654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fbbb8a9e27bf4e00ac2a97750661ddbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap626716af-45", "ovs_interfaceid": "626716af-4590-4f7f-a441-03ca790d82d4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1112.706046] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9def8b6-8e47-438a-abfa-b0f6373e1caa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.711107] env[68217]: DEBUG nova.compute.utils [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1112.718923] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a42b5c1b-1656-4efb-97e5-3cdfceb598dd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.755065] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01734e53-324d-448a-870a-ad7446ec223c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.764125] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29c1757d-84de-4c2f-8980-1a501df1a034 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.779789] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1112.793894] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962015, 'name': CreateVM_Task, 'duration_secs': 0.374937} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.794678] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1112.795384] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.795588] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1112.796011] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1112.796427] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d89399e5-73c5-4b36-8a6f-001b0c94d0ec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.802890] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1112.802890] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524988ad-572a-231f-ebbd-885bc7421546" [ 1112.802890] env[68217]: _type = "Task" [ 1112.802890] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.816593] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f04b126c-b29c-4f1f-9bf3-a5f64c0c2a05 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.816798] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f04b126c-b29c-4f1f-9bf3-a5f64c0c2a05 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1112.816977] env[68217]: DEBUG nova.network.neutron [None req-f04b126c-b29c-4f1f-9bf3-a5f64c0c2a05 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1112.818438] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524988ad-572a-231f-ebbd-885bc7421546, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.825184] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d351a5-db2f-1533-3b51-4926cc40bc5a, 'name': SearchDatastore_Task, 'duration_secs': 0.011894} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.825880] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1112.826123] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1112.826405] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1112.826970] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1112.826970] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1112.827285] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-659db9d3-4696-4476-ac64-85232215c4e8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.841687] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962016, 'name': CreateSnapshot_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.851350] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1112.851542] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1112.852315] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a5e9b0a6-d93d-4787-ad90-bf40ae25fae4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.859324] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1112.859324] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]525e3454-675a-d3da-15a4-0171b9290f93" [ 1112.859324] env[68217]: _type = "Task" [ 1112.859324] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.870185] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]525e3454-675a-d3da-15a4-0171b9290f93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.108941] env[68217]: DEBUG nova.network.neutron [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1113.115932] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962017, 'name': Rename_Task, 'duration_secs': 0.151673} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.116262] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1113.116539] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9560b28e-0486-4710-9600-a01284867273 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.124578] env[68217]: DEBUG oslo_concurrency.lockutils [req-0335cb51-8827-40f0-9680-1a00e928569e req-a2c9c77b-30b6-45db-92ea-8a0c9b1dabf8 service nova] Releasing lock "refresh_cache-309aa787-aa7d-4508-bf90-499958747c46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1113.125661] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1113.125661] env[68217]: value = "task-2962018" [ 1113.125661] env[68217]: _type = "Task" [ 1113.125661] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.134171] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962018, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.214858] env[68217]: DEBUG oslo_concurrency.lockutils [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.010s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.285741] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1113.315800] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524988ad-572a-231f-ebbd-885bc7421546, 'name': SearchDatastore_Task, 'duration_secs': 0.042079} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.316136] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1113.316371] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1113.316595] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.316739] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.316915] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1113.317409] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8b2608ea-df91-4508-b063-ae1a8e046bfc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.332523] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1113.332715] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1113.333538] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0bf64c55-77e0-4815-a702-08a6281341a2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.345301] env[68217]: DEBUG nova.network.neutron [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Updating instance_info_cache with network_info: [{"id": "3066fae5-f17e-4a05-8512-540ca8da5bcf", "address": "fa:16:3e:cf:e1:a1", "network": {"id": "1d50c0a9-6adb-41a4-aaba-7f1b5d8738a0", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-222715654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fbbb8a9e27bf4e00ac2a97750661ddbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3066fae5-f1", "ovs_interfaceid": "3066fae5-f17e-4a05-8512-540ca8da5bcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.354023] env[68217]: DEBUG nova.compute.manager [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1113.356475] env[68217]: DEBUG nova.compute.manager [req-198b5bd9-023b-45c2-81a6-d0eac10c7512 req-fd28e9f0-2a98-4e23-a88f-9f0e5cff10b7 service nova] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Received event network-vif-plugged-3066fae5-f17e-4a05-8512-540ca8da5bcf {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1113.356551] env[68217]: DEBUG oslo_concurrency.lockutils [req-198b5bd9-023b-45c2-81a6-d0eac10c7512 req-fd28e9f0-2a98-4e23-a88f-9f0e5cff10b7 service nova] Acquiring lock "1d8973e7-1da3-4c17-9516-007b2356854f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1113.357315] env[68217]: DEBUG oslo_concurrency.lockutils [req-198b5bd9-023b-45c2-81a6-d0eac10c7512 req-fd28e9f0-2a98-4e23-a88f-9f0e5cff10b7 service nova] Lock "1d8973e7-1da3-4c17-9516-007b2356854f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1113.357315] env[68217]: DEBUG oslo_concurrency.lockutils [req-198b5bd9-023b-45c2-81a6-d0eac10c7512 req-fd28e9f0-2a98-4e23-a88f-9f0e5cff10b7 service nova] Lock "1d8973e7-1da3-4c17-9516-007b2356854f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.357315] env[68217]: DEBUG nova.compute.manager [req-198b5bd9-023b-45c2-81a6-d0eac10c7512 req-fd28e9f0-2a98-4e23-a88f-9f0e5cff10b7 service nova] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] No waiting events found dispatching network-vif-plugged-3066fae5-f17e-4a05-8512-540ca8da5bcf {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1113.357315] env[68217]: WARNING nova.compute.manager [req-198b5bd9-023b-45c2-81a6-d0eac10c7512 req-fd28e9f0-2a98-4e23-a88f-9f0e5cff10b7 service nova] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Received unexpected event network-vif-plugged-3066fae5-f17e-4a05-8512-540ca8da5bcf for instance with vm_state building and task_state spawning. [ 1113.357544] env[68217]: DEBUG nova.compute.manager [req-198b5bd9-023b-45c2-81a6-d0eac10c7512 req-fd28e9f0-2a98-4e23-a88f-9f0e5cff10b7 service nova] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Received event network-changed-3066fae5-f17e-4a05-8512-540ca8da5bcf {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1113.357544] env[68217]: DEBUG nova.compute.manager [req-198b5bd9-023b-45c2-81a6-d0eac10c7512 req-fd28e9f0-2a98-4e23-a88f-9f0e5cff10b7 service nova] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Refreshing instance network info cache due to event network-changed-3066fae5-f17e-4a05-8512-540ca8da5bcf. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1113.360107] env[68217]: DEBUG oslo_concurrency.lockutils [req-198b5bd9-023b-45c2-81a6-d0eac10c7512 req-fd28e9f0-2a98-4e23-a88f-9f0e5cff10b7 service nova] Acquiring lock "refresh_cache-1d8973e7-1da3-4c17-9516-007b2356854f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.360331] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1113.360331] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f91fa7-9816-df85-b5de-7b103901897a" [ 1113.360331] env[68217]: _type = "Task" [ 1113.360331] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.372273] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962016, 'name': CreateSnapshot_Task, 'duration_secs': 0.947691} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.375059] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1113.377416] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73dffd61-d0c7-4136-9e13-7aec4f81b946 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.386651] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]525e3454-675a-d3da-15a4-0171b9290f93, 'name': SearchDatastore_Task, 'duration_secs': 0.035081} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.387495] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f91fa7-9816-df85-b5de-7b103901897a, 'name': SearchDatastore_Task, 'duration_secs': 0.013386} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.389325] env[68217]: DEBUG nova.virt.hardware [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1113.389552] env[68217]: DEBUG nova.virt.hardware [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1113.390449] env[68217]: DEBUG nova.virt.hardware [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1113.390449] env[68217]: DEBUG nova.virt.hardware [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1113.390449] env[68217]: DEBUG nova.virt.hardware [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1113.390449] env[68217]: DEBUG nova.virt.hardware [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1113.390449] env[68217]: DEBUG nova.virt.hardware [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1113.391263] env[68217]: DEBUG nova.virt.hardware [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 
tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1113.391263] env[68217]: DEBUG nova.virt.hardware [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1113.391263] env[68217]: DEBUG nova.virt.hardware [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1113.391263] env[68217]: DEBUG nova.virt.hardware [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1113.393614] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cde2d106-ec4b-4e85-893c-9234781a366c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.398928] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-070f2317-c523-4194-8003-6a43a2e5a88c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.401358] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d22db6d9-23b7-4f95-a2bc-050dd2a9970b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.416029] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b96950-6e4c-4fa2-88b8-5d8f19a84534 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.421208] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1113.421208] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529cb598-79a3-05c5-bb12-8d05342a5dfe" [ 1113.421208] env[68217]: _type = "Task" [ 1113.421208] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.421534] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1113.421534] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]526f3cfe-2a6e-7926-c43d-7e8206566d2c" [ 1113.421534] env[68217]: _type = "Task" [ 1113.421534] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.444139] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526f3cfe-2a6e-7926-c43d-7e8206566d2c, 'name': SearchDatastore_Task, 'duration_secs': 0.018304} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.447626] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1113.447901] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 309aa787-aa7d-4508-bf90-499958747c46/309aa787-aa7d-4508-bf90-499958747c46.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1113.448496] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529cb598-79a3-05c5-bb12-8d05342a5dfe, 'name': SearchDatastore_Task, 'duration_secs': 0.014481} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.448707] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ba162c16-6f33-4a0e-89ed-f31afcf284fb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.450988] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1113.451244] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] a4dcc7fb-83e4-4bb9-9c98-9569daee1435/a4dcc7fb-83e4-4bb9-9c98-9569daee1435.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1113.451512] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2a5e96d4-a8bb-417d-9fa0-99fd8690b531 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.460057] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1113.460057] env[68217]: value = "task-2962020" [ 1113.460057] env[68217]: _type = "Task" [ 1113.460057] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.461370] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1113.461370] env[68217]: value = "task-2962019" [ 1113.461370] env[68217]: _type = "Task" [ 1113.461370] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.472698] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962019, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.475788] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962020, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.639221] env[68217]: DEBUG nova.compute.manager [req-7bdc5f68-c2f0-4ae0-aea4-8dfd9e22f303 req-01f70c00-f257-4e51-b225-92ec728cad2d service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Received event network-changed-753c250a-569f-42f8-a9e7-fed02079c841 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1113.639432] env[68217]: DEBUG nova.compute.manager [req-7bdc5f68-c2f0-4ae0-aea4-8dfd9e22f303 req-01f70c00-f257-4e51-b225-92ec728cad2d service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Refreshing instance network info cache due to event network-changed-753c250a-569f-42f8-a9e7-fed02079c841. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1113.639658] env[68217]: DEBUG oslo_concurrency.lockutils [req-7bdc5f68-c2f0-4ae0-aea4-8dfd9e22f303 req-01f70c00-f257-4e51-b225-92ec728cad2d service nova] Acquiring lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1113.639811] env[68217]: DEBUG oslo_concurrency.lockutils [req-7bdc5f68-c2f0-4ae0-aea4-8dfd9e22f303 req-01f70c00-f257-4e51-b225-92ec728cad2d service nova] Acquired lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.639981] env[68217]: DEBUG nova.network.neutron [req-7bdc5f68-c2f0-4ae0-aea4-8dfd9e22f303 req-01f70c00-f257-4e51-b225-92ec728cad2d service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Refreshing network info cache for port 753c250a-569f-42f8-a9e7-fed02079c841 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1113.644887] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962018, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.648705] env[68217]: DEBUG nova.network.neutron [None req-f04b126c-b29c-4f1f-9bf3-a5f64c0c2a05 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating instance_info_cache with network_info: [{"id": "7ca41605-8ab9-4d01-835b-70d47e78fce9", "address": "fa:16:3e:26:5b:d2", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ca41605-8a", "ovs_interfaceid": "7ca41605-8ab9-4d01-835b-70d47e78fce9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.791101] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68217) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1113.791371] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.388s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1113.792130] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1113.792277] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Cleaning up deleted instances with incomplete migration {{(pid=68217) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1113.855824] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Releasing lock "refresh_cache-1d8973e7-1da3-4c17-9516-007b2356854f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1113.856228] env[68217]: DEBUG nova.compute.manager [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] 
[instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Instance network_info: |[{"id": "3066fae5-f17e-4a05-8512-540ca8da5bcf", "address": "fa:16:3e:cf:e1:a1", "network": {"id": "1d50c0a9-6adb-41a4-aaba-7f1b5d8738a0", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-222715654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fbbb8a9e27bf4e00ac2a97750661ddbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3066fae5-f1", "ovs_interfaceid": "3066fae5-f17e-4a05-8512-540ca8da5bcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1113.856597] env[68217]: DEBUG oslo_concurrency.lockutils [req-198b5bd9-023b-45c2-81a6-d0eac10c7512 req-fd28e9f0-2a98-4e23-a88f-9f0e5cff10b7 service nova] Acquired lock "refresh_cache-1d8973e7-1da3-4c17-9516-007b2356854f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1113.856799] env[68217]: DEBUG nova.network.neutron [req-198b5bd9-023b-45c2-81a6-d0eac10c7512 req-fd28e9f0-2a98-4e23-a88f-9f0e5cff10b7 service nova] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Refreshing network info cache for port 3066fae5-f17e-4a05-8512-540ca8da5bcf {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1113.858406] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cf:e1:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '93341b73-918c-4e9d-9c66-ca171a54b574', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3066fae5-f17e-4a05-8512-540ca8da5bcf', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1113.878030] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1113.879869] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1113.880414] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8976e423-956b-4cc7-b024-b09798f8fd17 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.905186] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1113.905186] env[68217]: value = "task-2962021" [ 1113.905186] env[68217]: _type = "Task" [ 1113.905186] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.917328] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1113.918121] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-6e9b8c5c-2f48-4981-a039-db432c2f1811 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.928642] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962021, 'name': CreateVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.930298] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1113.930298] env[68217]: value = "task-2962022" [ 1113.930298] env[68217]: _type = "Task" [ 1113.930298] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1113.940454] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962022, 'name': CloneVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.982597] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962019, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1113.982963] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962020, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.138125] env[68217]: DEBUG oslo_vmware.api [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962018, 'name': PowerOnVM_Task, 'duration_secs': 0.516701} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.141102] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1114.141102] env[68217]: INFO nova.compute.manager [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Took 7.87 seconds to spawn the instance on the hypervisor. [ 1114.141102] env[68217]: DEBUG nova.compute.manager [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1114.141102] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1022ab42-ca98-4bfc-9b99-d97e419a01e9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.153742] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f04b126c-b29c-4f1f-9bf3-a5f64c0c2a05 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1114.285782] env[68217]: DEBUG oslo_concurrency.lockutils [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1114.286090] env[68217]: DEBUG oslo_concurrency.lockutils [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1114.286325] env[68217]: INFO nova.compute.manager [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Attaching volume a47bb4bc-3ddf-46eb-b753-4e8dcce58334 to /dev/sdb [ 1114.319819] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29249b0e-1e70-438e-87aa-3e22253740c3 {{(pid=68217) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.327400] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b292aba-2760-4966-bf5d-c8d43c1af11f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.342850] env[68217]: DEBUG nova.virt.block_device [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Updating existing volume attachment record: 3105fb50-fa4c-4acb-b649-92a3f3c0295a {{(pid=68217) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1114.349488] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.349731] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.349935] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.350064] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.350202] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68217) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1114.350372] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.439238] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962021, 'name': CreateVM_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.449041] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962022, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.475224] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962020, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.643856} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.478617] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] a4dcc7fb-83e4-4bb9-9c98-9569daee1435/a4dcc7fb-83e4-4bb9-9c98-9569daee1435.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1114.478857] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1114.479161] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962019, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.608225} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1114.479402] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-912d121d-d7ac-4c12-816d-77baa9d7c282 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.481366] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 309aa787-aa7d-4508-bf90-499958747c46/309aa787-aa7d-4508-bf90-499958747c46.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1114.481610] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1114.481863] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ce3b5d75-898f-4f39-b6e2-5ed49e0cdf57 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.490169] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1114.490169] env[68217]: value = "task-2962023" [ 1114.490169] env[68217]: _type = "Task" [ 1114.490169] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.490673] env[68217]: DEBUG nova.network.neutron [req-198b5bd9-023b-45c2-81a6-d0eac10c7512 req-fd28e9f0-2a98-4e23-a88f-9f0e5cff10b7 service nova] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Updated VIF entry in instance network info cache for port 3066fae5-f17e-4a05-8512-540ca8da5bcf. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1114.491070] env[68217]: DEBUG nova.network.neutron [req-198b5bd9-023b-45c2-81a6-d0eac10c7512 req-fd28e9f0-2a98-4e23-a88f-9f0e5cff10b7 service nova] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Updating instance_info_cache with network_info: [{"id": "3066fae5-f17e-4a05-8512-540ca8da5bcf", "address": "fa:16:3e:cf:e1:a1", "network": {"id": "1d50c0a9-6adb-41a4-aaba-7f1b5d8738a0", "bridge": "br-int", "label": "tempest-MultipleCreateTestJSON-222715654-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "fbbb8a9e27bf4e00ac2a97750661ddbb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "93341b73-918c-4e9d-9c66-ca171a54b574", "external-id": "nsx-vlan-transportzone-663", "segmentation_id": 663, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3066fae5-f1", "ovs_interfaceid": "3066fae5-f17e-4a05-8512-540ca8da5bcf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.493686] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1114.493686] env[68217]: value = "task-2962024" [ 1114.493686] env[68217]: _type = "Task" [ 1114.493686] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1114.514744] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962023, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.514744] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962024, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.522220] env[68217]: DEBUG nova.network.neutron [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Successfully updated port: a0dbcb6d-8e89-4e49-a2d1-3dccf029a33a {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1114.658977] env[68217]: DEBUG nova.compute.manager [None req-f04b126c-b29c-4f1f-9bf3-a5f64c0c2a05 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1114.659594] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6e3895-0e5b-4f2e-b18b-774e43fa9ee1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1114.662576] env[68217]: INFO nova.compute.manager [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Took 12.88 seconds to build instance. [ 1114.693659] env[68217]: DEBUG nova.network.neutron [req-7bdc5f68-c2f0-4ae0-aea4-8dfd9e22f303 req-01f70c00-f257-4e51-b225-92ec728cad2d service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Updated VIF entry in instance network info cache for port 753c250a-569f-42f8-a9e7-fed02079c841. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1114.694044] env[68217]: DEBUG nova.network.neutron [req-7bdc5f68-c2f0-4ae0-aea4-8dfd9e22f303 req-01f70c00-f257-4e51-b225-92ec728cad2d service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Updating instance_info_cache with network_info: [{"id": "753c250a-569f-42f8-a9e7-fed02079c841", "address": "fa:16:3e:ee:87:7a", "network": {"id": "8ad06dc6-c950-487f-b4ea-fe9364f8548d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-897727038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1424003d74424a9e84d15879f2e634e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap753c250a-56", "ovs_interfaceid": "753c250a-569f-42f8-a9e7-fed02079c841", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1114.921560] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962021, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.948589] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962022, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1114.995839] env[68217]: DEBUG oslo_concurrency.lockutils [req-198b5bd9-023b-45c2-81a6-d0eac10c7512 req-fd28e9f0-2a98-4e23-a88f-9f0e5cff10b7 service nova] Releasing lock "refresh_cache-1d8973e7-1da3-4c17-9516-007b2356854f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1115.010364] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962024, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.010364] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962023, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.025439] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Acquiring lock "refresh_cache-84f7ae5d-abbd-4102-b4a9-5468e0edefc6" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.025439] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Acquired lock "refresh_cache-84f7ae5d-abbd-4102-b4a9-5468e0edefc6" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1115.025548] env[68217]: DEBUG nova.network.neutron [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1115.164819] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91ac1d5a-58ba-4793-83cd-2406aa53e72e tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "759149be-178f-4238-b9c3-c316d060d6be" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.412s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.197472] env[68217]: DEBUG oslo_concurrency.lockutils [req-7bdc5f68-c2f0-4ae0-aea4-8dfd9e22f303 req-01f70c00-f257-4e51-b225-92ec728cad2d service nova] Releasing lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1115.429476] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962021, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.448265] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962022, 'name': CloneVM_Task, 'duration_secs': 1.470543} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.448547] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Created linked-clone VM from snapshot [ 1115.449295] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c66f44f8-77a8-48f3-afd8-6481b61f49b6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.456652] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Uploading image 5f518e85-1779-448c-b381-424d3d5af7dd {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1115.485365] env[68217]: DEBUG oslo_vmware.rw_handles [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1115.485365] env[68217]: value = "vm-594398" [ 1115.485365] env[68217]: _type = "VirtualMachine" [ 1115.485365] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1115.485651] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-6f306929-8a83-430a-9a6c-3f1ce27a985c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.495514] env[68217]: DEBUG oslo_vmware.rw_handles [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lease: (returnval){ [ 1115.495514] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]520e985b-63cc-99fa-96fc-53b5a24710f0" [ 1115.495514] env[68217]: _type = "HttpNfcLease" [ 1115.495514] env[68217]: } obtained for exporting VM: (result){ [ 1115.495514] env[68217]: value = "vm-594398" [ 1115.495514] env[68217]: _type = "VirtualMachine" [ 1115.495514] env[68217]: }. 
{{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1115.495858] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the lease: (returnval){ [ 1115.495858] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]520e985b-63cc-99fa-96fc-53b5a24710f0" [ 1115.495858] env[68217]: _type = "HttpNfcLease" [ 1115.495858] env[68217]: } to be ready. {{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1115.509087] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962023, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.513227] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962024, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.513444] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1115.513444] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]520e985b-63cc-99fa-96fc-53b5a24710f0" [ 1115.513444] env[68217]: _type = "HttpNfcLease" [ 1115.513444] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1115.513816] env[68217]: DEBUG oslo_vmware.rw_handles [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1115.513816] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]520e985b-63cc-99fa-96fc-53b5a24710f0" [ 1115.513816] env[68217]: _type = "HttpNfcLease" [ 1115.513816] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1115.514411] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db8cdb3-be0b-4557-9329-2735a5df3d79 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.522631] env[68217]: DEBUG oslo_vmware.rw_handles [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520f2353-b41d-1ed3-e225-f251794b3122/disk-0.vmdk from lease info. {{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1115.522810] env[68217]: DEBUG oslo_vmware.rw_handles [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520f2353-b41d-1ed3-e225-f251794b3122/disk-0.vmdk for reading. 
{{(pid=68217) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1115.618656] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-add53111-a1ca-4344-a8a0-be6b683e1e4d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.664535] env[68217]: DEBUG nova.network.neutron [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1115.676677] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fccf319-2b40-410f-b64d-083bb327ef7f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.686839] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f04b126c-b29c-4f1f-9bf3-a5f64c0c2a05 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Doing hard reboot of VM {{(pid=68217) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 1115.687129] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-0e50f59a-19b1-4a00-a1a5-dc23a47f82ea {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.694027] env[68217]: DEBUG nova.compute.manager [req-6e5a5da4-c619-4f53-91f1-dead53c8f5b0 req-69cad3d8-56e1-4b7f-97e1-1f38fa4da9b2 service nova] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Received event network-vif-plugged-a0dbcb6d-8e89-4e49-a2d1-3dccf029a33a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1115.694287] env[68217]: DEBUG oslo_concurrency.lockutils [req-6e5a5da4-c619-4f53-91f1-dead53c8f5b0 req-69cad3d8-56e1-4b7f-97e1-1f38fa4da9b2 service nova] Acquiring lock "84f7ae5d-abbd-4102-b4a9-5468e0edefc6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1115.694499] env[68217]: DEBUG oslo_concurrency.lockutils [req-6e5a5da4-c619-4f53-91f1-dead53c8f5b0 req-69cad3d8-56e1-4b7f-97e1-1f38fa4da9b2 service nova] Lock "84f7ae5d-abbd-4102-b4a9-5468e0edefc6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1115.694659] env[68217]: DEBUG oslo_concurrency.lockutils [req-6e5a5da4-c619-4f53-91f1-dead53c8f5b0 req-69cad3d8-56e1-4b7f-97e1-1f38fa4da9b2 service nova] Lock "84f7ae5d-abbd-4102-b4a9-5468e0edefc6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1115.694898] env[68217]: DEBUG nova.compute.manager [req-6e5a5da4-c619-4f53-91f1-dead53c8f5b0 req-69cad3d8-56e1-4b7f-97e1-1f38fa4da9b2 service nova] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] No waiting events found dispatching network-vif-plugged-a0dbcb6d-8e89-4e49-a2d1-3dccf029a33a {{(pid=68217) 
pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1115.695035] env[68217]: WARNING nova.compute.manager [req-6e5a5da4-c619-4f53-91f1-dead53c8f5b0 req-69cad3d8-56e1-4b7f-97e1-1f38fa4da9b2 service nova] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Received unexpected event network-vif-plugged-a0dbcb6d-8e89-4e49-a2d1-3dccf029a33a for instance with vm_state building and task_state spawning. [ 1115.696250] env[68217]: DEBUG nova.compute.manager [req-6e5a5da4-c619-4f53-91f1-dead53c8f5b0 req-69cad3d8-56e1-4b7f-97e1-1f38fa4da9b2 service nova] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Received event network-changed-a0dbcb6d-8e89-4e49-a2d1-3dccf029a33a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1115.696250] env[68217]: DEBUG nova.compute.manager [req-6e5a5da4-c619-4f53-91f1-dead53c8f5b0 req-69cad3d8-56e1-4b7f-97e1-1f38fa4da9b2 service nova] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Refreshing instance network info cache due to event network-changed-a0dbcb6d-8e89-4e49-a2d1-3dccf029a33a. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1115.696250] env[68217]: DEBUG oslo_concurrency.lockutils [req-6e5a5da4-c619-4f53-91f1-dead53c8f5b0 req-69cad3d8-56e1-4b7f-97e1-1f38fa4da9b2 service nova] Acquiring lock "refresh_cache-84f7ae5d-abbd-4102-b4a9-5468e0edefc6" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.697331] env[68217]: DEBUG oslo_vmware.api [None req-f04b126c-b29c-4f1f-9bf3-a5f64c0c2a05 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1115.697331] env[68217]: value = "task-2962027" [ 1115.697331] env[68217]: _type = "Task" [ 1115.697331] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.711061] env[68217]: DEBUG oslo_vmware.api [None req-f04b126c-b29c-4f1f-9bf3-a5f64c0c2a05 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962027, 'name': ResetVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1115.720770] env[68217]: DEBUG nova.compute.manager [req-c101daea-2a37-4684-964d-2134fb6b2980 req-27595000-ed6e-4c25-82a8-a220dde3cd4b service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Received event network-changed-753c250a-569f-42f8-a9e7-fed02079c841 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1115.720950] env[68217]: DEBUG nova.compute.manager [req-c101daea-2a37-4684-964d-2134fb6b2980 req-27595000-ed6e-4c25-82a8-a220dde3cd4b service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Refreshing instance network info cache due to event network-changed-753c250a-569f-42f8-a9e7-fed02079c841. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1115.721204] env[68217]: DEBUG oslo_concurrency.lockutils [req-c101daea-2a37-4684-964d-2134fb6b2980 req-27595000-ed6e-4c25-82a8-a220dde3cd4b service nova] Acquiring lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.721318] env[68217]: DEBUG oslo_concurrency.lockutils [req-c101daea-2a37-4684-964d-2134fb6b2980 req-27595000-ed6e-4c25-82a8-a220dde3cd4b service nova] Acquired lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1115.721476] env[68217]: DEBUG nova.network.neutron [req-c101daea-2a37-4684-964d-2134fb6b2980 req-27595000-ed6e-4c25-82a8-a220dde3cd4b service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Refreshing network info cache for port 753c250a-569f-42f8-a9e7-fed02079c841 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1115.921709] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962021, 'name': CreateVM_Task, 'duration_secs': 1.726391} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1115.921975] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1115.923078] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1115.923259] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1115.923577] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1115.923853] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2fafd6e9-1cbd-4fec-8adf-051ba828766d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.929934] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1115.929934] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a92701-46d0-3da6-a8bd-4b4d16773823" [ 1115.929934] env[68217]: _type = "Task" [ 1115.929934] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1115.942520] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a92701-46d0-3da6-a8bd-4b4d16773823, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.010028] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962023, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.143487} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.013299] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1116.013709] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962024, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.13571} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.014489] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-104a471e-8152-424a-a122-eaceb6db9dbb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.017019] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1116.017850] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5847cb37-ae7d-432d-9b1e-eafae78b5919 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.042481] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Reconfiguring VM instance instance-00000072 to attach disk [datastore2] 309aa787-aa7d-4508-bf90-499958747c46/309aa787-aa7d-4508-bf90-499958747c46.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1116.052103] env[68217]: DEBUG nova.network.neutron [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Updating instance_info_cache with network_info: [{"id": "a0dbcb6d-8e89-4e49-a2d1-3dccf029a33a", "address": 
"fa:16:3e:66:3e:80", "network": {"id": "790d65e1-d231-4cf6-82f6-3b3bbd954dbd", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-283646446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "132f93394d0e4acca4a3201b049e8538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0dbcb6d-8e", "ovs_interfaceid": "a0dbcb6d-8e89-4e49-a2d1-3dccf029a33a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.053438] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0ab01f33-91c2-4ba5-b00d-ce1d37a43abb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.077074] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] a4dcc7fb-83e4-4bb9-9c98-9569daee1435/a4dcc7fb-83e4-4bb9-9c98-9569daee1435.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1116.078524] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9e01f749-af56-45c7-b4f0-867ca22651c0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.102142] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1116.102142] env[68217]: value = "task-2962029" [ 1116.102142] env[68217]: _type = "Task" [ 1116.102142] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.103942] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1116.103942] env[68217]: value = "task-2962028" [ 1116.103942] env[68217]: _type = "Task" [ 1116.103942] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.117425] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962028, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.121739] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962029, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.212014] env[68217]: DEBUG oslo_vmware.api [None req-f04b126c-b29c-4f1f-9bf3-a5f64c0c2a05 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962027, 'name': ResetVM_Task, 'duration_secs': 0.10088} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.212014] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f04b126c-b29c-4f1f-9bf3-a5f64c0c2a05 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Did hard reboot of VM {{(pid=68217) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 1116.212014] env[68217]: DEBUG nova.compute.manager [None req-f04b126c-b29c-4f1f-9bf3-a5f64c0c2a05 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1116.212571] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e715a3e-118c-4cc0-bf1b-aa6f5b4e0193 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.445671] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a92701-46d0-3da6-a8bd-4b4d16773823, 'name': SearchDatastore_Task, 'duration_secs': 0.021808} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.446223] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1116.446676] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1116.446766] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1116.446961] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1116.447260] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1116.447551] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5895e1f2-64ce-4023-bb49-f61bf133387d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.463095] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1116.463394] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1116.464491] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b519608c-5b28-438f-89de-6698ecdd9ace {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.472336] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1116.472336] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527d90a0-ba9b-9eac-b1ac-19b1e0873521" [ 1116.472336] env[68217]: _type = "Task" [ 1116.472336] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.483629] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527d90a0-ba9b-9eac-b1ac-19b1e0873521, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.515623] env[68217]: DEBUG nova.network.neutron [req-c101daea-2a37-4684-964d-2134fb6b2980 req-27595000-ed6e-4c25-82a8-a220dde3cd4b service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Updated VIF entry in instance network info cache for port 753c250a-569f-42f8-a9e7-fed02079c841. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1116.516069] env[68217]: DEBUG nova.network.neutron [req-c101daea-2a37-4684-964d-2134fb6b2980 req-27595000-ed6e-4c25-82a8-a220dde3cd4b service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Updating instance_info_cache with network_info: [{"id": "753c250a-569f-42f8-a9e7-fed02079c841", "address": "fa:16:3e:ee:87:7a", "network": {"id": "8ad06dc6-c950-487f-b4ea-fe9364f8548d", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-897727038-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1424003d74424a9e84d15879f2e634e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0e00b2f1-c70f-4b21-86eb-810643cc1680", "external-id": "nsx-vlan-transportzone-487", "segmentation_id": 487, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap753c250a-56", "ovs_interfaceid": "753c250a-569f-42f8-a9e7-fed02079c841", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1116.578474] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 
tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Releasing lock "refresh_cache-84f7ae5d-abbd-4102-b4a9-5468e0edefc6" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1116.578816] env[68217]: DEBUG nova.compute.manager [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Instance network_info: |[{"id": "a0dbcb6d-8e89-4e49-a2d1-3dccf029a33a", "address": "fa:16:3e:66:3e:80", "network": {"id": "790d65e1-d231-4cf6-82f6-3b3bbd954dbd", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-283646446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "132f93394d0e4acca4a3201b049e8538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0dbcb6d-8e", "ovs_interfaceid": "a0dbcb6d-8e89-4e49-a2d1-3dccf029a33a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1116.579147] env[68217]: DEBUG oslo_concurrency.lockutils [req-6e5a5da4-c619-4f53-91f1-dead53c8f5b0 req-69cad3d8-56e1-4b7f-97e1-1f38fa4da9b2 service nova] Acquired lock "refresh_cache-84f7ae5d-abbd-4102-b4a9-5468e0edefc6" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1116.579400] env[68217]: DEBUG nova.network.neutron [req-6e5a5da4-c619-4f53-91f1-dead53c8f5b0 req-69cad3d8-56e1-4b7f-97e1-1f38fa4da9b2 service nova] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Refreshing network info cache for port a0dbcb6d-8e89-4e49-a2d1-3dccf029a33a {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1116.580701] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:66:3e:80', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'eb2eaecd-9701-4504-9fcb-fb1a420ead72', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a0dbcb6d-8e89-4e49-a2d1-3dccf029a33a', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1116.588175] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Creating folder: Project (132f93394d0e4acca4a3201b049e8538). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1116.589337] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-bd8a2f22-e318-4f12-ab93-4ef4b72d1755 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.603251] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Created folder: Project (132f93394d0e4acca4a3201b049e8538) in parent group-v594094. [ 1116.603448] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Creating folder: Instances. Parent ref: group-v594400. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1116.603747] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a25ea4f6-bb24-4f93-aa9d-2b1253f19768 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.618959] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962029, 'name': ReconfigVM_Task, 'duration_secs': 0.390939} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.623670] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Reconfigured VM instance instance-00000068 to attach disk [datastore1] a4dcc7fb-83e4-4bb9-9c98-9569daee1435/a4dcc7fb-83e4-4bb9-9c98-9569daee1435.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1116.623670] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Block device information present: {'root_device_name': '/dev/sda', 'image': [{'encryption_format': None, 'device_type': 'disk', 'size': 0, 'encryption_options': None, 'encryption_secret_uuid': None, 'guest_format': None, 'encrypted': False, 'boot_index': 0, 'disk_bus': None, 'device_name': '/dev/sda', 'image_id': '575ba628-84b6-4b0c-98ba-305166627d10'}], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sdb', 'attachment_id': '5aae7315-a61c-42bc-8565-ea6d0876d77e', 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594391', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'name': 'volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a4dcc7fb-83e4-4bb9-9c98-9569daee1435', 'attached_at': '', 'detached_at': '', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'serial': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d'}, 'guest_format': None, 'delete_on_termination': False, 
'boot_index': None, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=68217) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1116.623670] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Volume attach. Driver type: vmdk {{(pid=68217) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1116.623670] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594391', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'name': 'volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a4dcc7fb-83e4-4bb9-9c98-9569daee1435', 'attached_at': '', 'detached_at': '', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'serial': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1116.623670] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962028, 'name': ReconfigVM_Task, 'duration_secs': 0.38926} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.625329] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b4c0c2a-09ed-4d54-b27c-657324ac3382 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.628176] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Reconfigured VM instance instance-00000072 to attach disk [datastore2] 309aa787-aa7d-4508-bf90-499958747c46/309aa787-aa7d-4508-bf90-499958747c46.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1116.628841] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Created folder: Instances in parent group-v594400. [ 1116.629132] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1116.629733] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-98de7a8e-1b10-4d2e-a6b4-718a63077f8b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.631436] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1116.632145] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-20e01e77-c1d7-43bb-9237-9a53593c5f45 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.660263] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e93bebc-c703-49cc-87d4-f1f6bf919432 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.664799] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1116.664799] env[68217]: value = "task-2962034" [ 1116.664799] env[68217]: _type = "Task" [ 1116.664799] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.670305] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1116.670305] env[68217]: value = "task-2962033" [ 1116.670305] env[68217]: _type = "Task" [ 1116.670305] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.691245] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Reconfiguring VM instance instance-00000068 to attach disk [datastore2] volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d/volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1116.695862] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd0f9bfd-46d2-485c-86cf-7c1cbc927e17 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.716159] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962034, 'name': CreateVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.723296] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962033, 'name': Rename_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.727958] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f04b126c-b29c-4f1f-9bf3-a5f64c0c2a05 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "d28bcf16-b081-4dc8-a975-2acaed222e15" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 4.451s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1116.730652] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1116.730652] env[68217]: value = "task-2962035" [ 1116.730652] env[68217]: _type = "Task" [ 1116.730652] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1116.742534] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962035, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1116.987503] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527d90a0-ba9b-9eac-b1ac-19b1e0873521, 'name': SearchDatastore_Task, 'duration_secs': 0.02035} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1116.987503] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-76e5d6d1-61da-4b24-9c4f-5e666d890a7b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.995655] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1116.995655] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52823a03-c285-7f2a-1a4f-2bdbf873d0b2" [ 1116.995655] env[68217]: _type = "Task" [ 1116.995655] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.005907] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52823a03-c285-7f2a-1a4f-2bdbf873d0b2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.018891] env[68217]: DEBUG oslo_concurrency.lockutils [req-c101daea-2a37-4684-964d-2134fb6b2980 req-27595000-ed6e-4c25-82a8-a220dde3cd4b service nova] Releasing lock "refresh_cache-6b4dff91-254e-43cc-85cf-7de6214dcafd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.178635] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962034, 'name': CreateVM_Task, 'duration_secs': 0.429173} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.178805] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1117.179560] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.179847] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.180455] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1117.180728] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03e6e828-8df3-49b5-a3ba-d95bf07fd93b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.189601] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Waiting for the task: (returnval){ [ 1117.189601] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]525de2b3-23a9-2f13-22df-6482f819b3b0" [ 1117.189601] env[68217]: _type = "Task" [ 1117.189601] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.199033] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962033, 'name': Rename_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.205114] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]525de2b3-23a9-2f13-22df-6482f819b3b0, 'name': SearchDatastore_Task, 'duration_secs': 0.013956} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.205746] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.206377] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1117.206377] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.243198] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962035, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.336997] env[68217]: DEBUG nova.network.neutron [req-6e5a5da4-c619-4f53-91f1-dead53c8f5b0 req-69cad3d8-56e1-4b7f-97e1-1f38fa4da9b2 service nova] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Updated VIF entry in instance network info cache for port a0dbcb6d-8e89-4e49-a2d1-3dccf029a33a. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1117.337458] env[68217]: DEBUG nova.network.neutron [req-6e5a5da4-c619-4f53-91f1-dead53c8f5b0 req-69cad3d8-56e1-4b7f-97e1-1f38fa4da9b2 service nova] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Updating instance_info_cache with network_info: [{"id": "a0dbcb6d-8e89-4e49-a2d1-3dccf029a33a", "address": "fa:16:3e:66:3e:80", "network": {"id": "790d65e1-d231-4cf6-82f6-3b3bbd954dbd", "bridge": "br-int", "label": "tempest-ServersNegativeTestMultiTenantJSON-283646446-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "132f93394d0e4acca4a3201b049e8538", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "eb2eaecd-9701-4504-9fcb-fb1a420ead72", "external-id": "nsx-vlan-transportzone-433", "segmentation_id": 433, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0dbcb6d-8e", "ovs_interfaceid": "a0dbcb6d-8e89-4e49-a2d1-3dccf029a33a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1117.506666] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52823a03-c285-7f2a-1a4f-2bdbf873d0b2, 'name': SearchDatastore_Task, 'duration_secs': 0.01609} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.506967] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1117.507374] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 1d8973e7-1da3-4c17-9516-007b2356854f/1d8973e7-1da3-4c17-9516-007b2356854f.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1117.507697] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.507907] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1117.508184] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f8b0508d-8754-4328-909e-5c49c516f999 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.510442] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8c8e60e3-623a-40c3-a23a-b215cb4cae45 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.519041] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1117.519041] env[68217]: value = "task-2962036" [ 1117.519041] env[68217]: _type = "Task" [ 1117.519041] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.524718] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1117.525023] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1117.526428] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-86dedcf9-f057-4332-8140-0c923af28f5c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.532939] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962036, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.537486] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Waiting for the task: (returnval){ [ 1117.537486] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52cc9144-4b52-1acd-9330-6bee5a9d1d1f" [ 1117.537486] env[68217]: _type = "Task" [ 1117.537486] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.546975] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52cc9144-4b52-1acd-9330-6bee5a9d1d1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.693966] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962033, 'name': Rename_Task, 'duration_secs': 0.934425} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1117.694447] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1117.694796] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-18a0b431-77ab-4e0d-997e-a37c9ad1a80a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.703735] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1117.703735] env[68217]: value = "task-2962037" [ 1117.703735] env[68217]: _type = "Task" [ 1117.703735] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1117.714771] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962037, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.726280] env[68217]: DEBUG nova.compute.manager [req-bf597eab-bd48-49e2-a3f3-87229922b5e1 req-9f306381-76c0-457d-a484-55c365f615dd service nova] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Received event network-changed-ccbc8261-8d58-4e71-9a59-ac46dac31267 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1117.726591] env[68217]: DEBUG nova.compute.manager [req-bf597eab-bd48-49e2-a3f3-87229922b5e1 req-9f306381-76c0-457d-a484-55c365f615dd service nova] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Refreshing instance network info cache due to event network-changed-ccbc8261-8d58-4e71-9a59-ac46dac31267. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1117.726892] env[68217]: DEBUG oslo_concurrency.lockutils [req-bf597eab-bd48-49e2-a3f3-87229922b5e1 req-9f306381-76c0-457d-a484-55c365f615dd service nova] Acquiring lock "refresh_cache-759149be-178f-4238-b9c3-c316d060d6be" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1117.727116] env[68217]: DEBUG oslo_concurrency.lockutils [req-bf597eab-bd48-49e2-a3f3-87229922b5e1 req-9f306381-76c0-457d-a484-55c365f615dd service nova] Acquired lock "refresh_cache-759149be-178f-4238-b9c3-c316d060d6be" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1117.727426] env[68217]: DEBUG nova.network.neutron [req-bf597eab-bd48-49e2-a3f3-87229922b5e1 req-9f306381-76c0-457d-a484-55c365f615dd service nova] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Refreshing network info cache for port ccbc8261-8d58-4e71-9a59-ac46dac31267 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1117.747093] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962035, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1117.840745] env[68217]: DEBUG oslo_concurrency.lockutils [req-6e5a5da4-c619-4f53-91f1-dead53c8f5b0 req-69cad3d8-56e1-4b7f-97e1-1f38fa4da9b2 service nova] Releasing lock "refresh_cache-84f7ae5d-abbd-4102-b4a9-5468e0edefc6" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.031126] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962036, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.048682] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52cc9144-4b52-1acd-9330-6bee5a9d1d1f, 'name': SearchDatastore_Task, 'duration_secs': 0.011949} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.049620] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18a5c2a5-de41-4331-94fb-951806757771 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.057375] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Waiting for the task: (returnval){ [ 1118.057375] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e1f44d-5f3b-7d73-9419-7ebd3dc3218e" [ 1118.057375] env[68217]: _type = "Task" [ 1118.057375] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.066348] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e1f44d-5f3b-7d73-9419-7ebd3dc3218e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.216504] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962037, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.245380] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962035, 'name': ReconfigVM_Task, 'duration_secs': 1.334295} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.245380] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Reconfigured VM instance instance-00000068 to attach disk [datastore2] volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d/volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1118.250887] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca98c6de-ad4f-471e-9094-436cd2281fb2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.267468] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1118.267468] env[68217]: value = "task-2962038" [ 1118.267468] env[68217]: _type = "Task" [ 1118.267468] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.280051] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962038, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.509963] env[68217]: DEBUG nova.network.neutron [req-bf597eab-bd48-49e2-a3f3-87229922b5e1 req-9f306381-76c0-457d-a484-55c365f615dd service nova] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Updated VIF entry in instance network info cache for port ccbc8261-8d58-4e71-9a59-ac46dac31267. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1118.510348] env[68217]: DEBUG nova.network.neutron [req-bf597eab-bd48-49e2-a3f3-87229922b5e1 req-9f306381-76c0-457d-a484-55c365f615dd service nova] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Updating instance_info_cache with network_info: [{"id": "ccbc8261-8d58-4e71-9a59-ac46dac31267", "address": "fa:16:3e:79:a8:c2", "network": {"id": "e7f56c12-ca87-40ad-b72d-955989c48237", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-417650994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.137", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3068bf39ee943f1bdf378f8b2a5c360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapccbc8261-8d", "ovs_interfaceid": "ccbc8261-8d58-4e71-9a59-ac46dac31267", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1118.531904] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962036, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.522089} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.532185] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 1d8973e7-1da3-4c17-9516-007b2356854f/1d8973e7-1da3-4c17-9516-007b2356854f.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1118.532402] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1118.532705] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f006ffff-d3ea-4940-939d-73518fca2c1d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.540601] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1118.540601] env[68217]: value = "task-2962039" [ 1118.540601] env[68217]: _type = "Task" [ 1118.540601] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.549154] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962039, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.572970] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e1f44d-5f3b-7d73-9419-7ebd3dc3218e, 'name': SearchDatastore_Task, 'duration_secs': 0.01474} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.573263] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1118.573534] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 84f7ae5d-abbd-4102-b4a9-5468e0edefc6/84f7ae5d-abbd-4102-b4a9-5468e0edefc6.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1118.573808] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d173be12-47a7-4947-b1a0-bb529b2982ac {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.581958] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Waiting for the task: (returnval){ [ 1118.581958] env[68217]: value = "task-2962040" [ 1118.581958] env[68217]: _type = "Task" [ 1118.581958] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.591233] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962040, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.716270] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962037, 'name': PowerOnVM_Task, 'duration_secs': 0.805865} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1118.716607] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1118.716860] env[68217]: INFO nova.compute.manager [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Took 10.09 seconds to spawn the instance on the hypervisor. 
[ 1118.717222] env[68217]: DEBUG nova.compute.manager [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1118.718151] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e01a167c-d33d-4f0c-a299-e1f81489f134 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.784096] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962038, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1118.853266] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1118.853429] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Cleaning up deleted instances {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1118.906626] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Volume attach. Driver type: vmdk {{(pid=68217) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1118.906626] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594399', 'volume_id': 'a47bb4bc-3ddf-46eb-b753-4e8dcce58334', 'name': 'volume-a47bb4bc-3ddf-46eb-b753-4e8dcce58334', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b', 'attached_at': '', 'detached_at': '', 'volume_id': 'a47bb4bc-3ddf-46eb-b753-4e8dcce58334', 'serial': 'a47bb4bc-3ddf-46eb-b753-4e8dcce58334'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1118.907471] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b105a5-7db1-4b3c-a949-1651c4e5f5e6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.925504] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef54bf76-5244-4cf7-b9eb-0c054054028a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.959919] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] 
Reconfiguring VM instance instance-0000006b to attach disk [datastore1] volume-a47bb4bc-3ddf-46eb-b753-4e8dcce58334/volume-a47bb4bc-3ddf-46eb-b753-4e8dcce58334.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1118.960304] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b79a4764-d6dd-4886-b729-315ed31ad3fe {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1118.983185] env[68217]: DEBUG oslo_vmware.api [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1118.983185] env[68217]: value = "task-2962041" [ 1118.983185] env[68217]: _type = "Task" [ 1118.983185] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1118.999120] env[68217]: DEBUG oslo_vmware.api [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962041, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.013186] env[68217]: DEBUG oslo_concurrency.lockutils [req-bf597eab-bd48-49e2-a3f3-87229922b5e1 req-9f306381-76c0-457d-a484-55c365f615dd service nova] Releasing lock "refresh_cache-759149be-178f-4238-b9c3-c316d060d6be" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1119.054865] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962039, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.244798} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.055362] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1119.056319] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d7b196b-858c-486d-b146-1e7367606192 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.083174] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Reconfiguring VM instance instance-00000073 to attach disk [datastore1] 1d8973e7-1da3-4c17-9516-007b2356854f/1d8973e7-1da3-4c17-9516-007b2356854f.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1119.083634] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-28ab3c58-8ed3-4414-a4a8-1ed94b0ac243 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.112158] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962040, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.113946] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1119.113946] env[68217]: value = "task-2962042" [ 1119.113946] env[68217]: _type = "Task" [ 1119.113946] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.125468] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962042, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.235928] env[68217]: INFO nova.compute.manager [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Took 15.08 seconds to build instance. [ 1119.281760] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962038, 'name': ReconfigVM_Task, 'duration_secs': 0.868674} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.282051] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594391', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'name': 'volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a4dcc7fb-83e4-4bb9-9c98-9569daee1435', 'attached_at': '', 'detached_at': '', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'serial': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1119.282657] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9245891-ef70-4efc-aaa4-2851531bd09d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.290112] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1119.290112] env[68217]: value = "task-2962043" [ 1119.290112] env[68217]: _type = "Task" [ 1119.290112] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.299841] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962043, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.371648] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] There are 58 instances to clean {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1119.371863] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 38ca1fd4-c9f5-44be-8efd-8e9843c8d4a9] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1119.493599] env[68217]: DEBUG oslo_vmware.api [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962041, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.594359] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962040, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.657762} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.594768] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 84f7ae5d-abbd-4102-b4a9-5468e0edefc6/84f7ae5d-abbd-4102-b4a9-5468e0edefc6.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1119.595119] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1119.595418] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c0fe3f64-6561-4739-8b7e-17975902c2c2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.605413] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Waiting for the task: (returnval){ [ 1119.605413] env[68217]: value = "task-2962044" [ 1119.605413] env[68217]: _type = "Task" [ 1119.605413] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.614285] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962044, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.623390] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962042, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.738491] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "309aa787-aa7d-4508-bf90-499958747c46" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.593s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1119.801197] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962043, 'name': Rename_Task, 'duration_secs': 0.284997} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.801424] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1119.801684] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fb1ed677-d0c2-480c-a703-fb74d101b08b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1119.809272] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1119.809272] env[68217]: value = "task-2962045" [ 1119.809272] env[68217]: _type = "Task" [ 1119.809272] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1119.817866] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962045, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1119.875640] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 8f0d5766-005e-459d-b9f4-e46b435e43b8] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1119.999243] env[68217]: DEBUG oslo_vmware.api [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962041, 'name': ReconfigVM_Task, 'duration_secs': 0.612251} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1119.999722] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Reconfigured VM instance instance-0000006b to attach disk [datastore1] volume-a47bb4bc-3ddf-46eb-b753-4e8dcce58334/volume-a47bb4bc-3ddf-46eb-b753-4e8dcce58334.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1120.007108] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c99aeb30-bf0a-4e61-bee1-5cf997c7285e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.026239] env[68217]: DEBUG oslo_vmware.api [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1120.026239] env[68217]: value = "task-2962046" [ 1120.026239] env[68217]: _type = "Task" [ 1120.026239] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.037976] env[68217]: DEBUG oslo_vmware.api [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962046, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.117121] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962044, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.125106] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962042, 'name': ReconfigVM_Task, 'duration_secs': 0.572874} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.125390] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Reconfigured VM instance instance-00000073 to attach disk [datastore1] 1d8973e7-1da3-4c17-9516-007b2356854f/1d8973e7-1da3-4c17-9516-007b2356854f.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1120.126021] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d18f6cd4-3aee-4f44-9c99-a7c523adf6b1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.133493] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1120.133493] env[68217]: value = "task-2962047" [ 1120.133493] env[68217]: _type = "Task" [ 1120.133493] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.143238] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962047, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.320962] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962045, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.379907] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: e3dfe047-7cdc-4a1d-8af3-6437b5555ac4] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1120.539669] env[68217]: DEBUG oslo_vmware.api [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962046, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.617879] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962044, 'name': ExtendVirtualDisk_Task} progress is 50%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.647156] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962047, 'name': Rename_Task, 'duration_secs': 0.249011} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.647551] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1120.647903] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c0a18e8f-a713-4da5-b47f-829f2c0c9a32 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.658024] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1120.658024] env[68217]: value = "task-2962048" [ 1120.658024] env[68217]: _type = "Task" [ 1120.658024] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1120.672368] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962048, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1120.823576] env[68217]: DEBUG oslo_vmware.api [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962045, 'name': PowerOnVM_Task, 'duration_secs': 0.927361} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1120.824954] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1120.824954] env[68217]: DEBUG nova.compute.manager [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1120.825226] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57953635-8d10-4598-a7a9-30ff0c00b98d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1120.882944] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 08ba7156-1c6d-4385-939c-bdd575c7fda3] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1121.040960] env[68217]: DEBUG oslo_vmware.api [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962046, 'name': ReconfigVM_Task, 'duration_secs': 0.6666} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.041427] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594399', 'volume_id': 'a47bb4bc-3ddf-46eb-b753-4e8dcce58334', 'name': 'volume-a47bb4bc-3ddf-46eb-b753-4e8dcce58334', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b', 'attached_at': '', 'detached_at': '', 'volume_id': 'a47bb4bc-3ddf-46eb-b753-4e8dcce58334', 'serial': 'a47bb4bc-3ddf-46eb-b753-4e8dcce58334'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1121.117637] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962044, 'name': ExtendVirtualDisk_Task, 'duration_secs': 1.042895} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.118073] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1121.118735] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a66961cf-98cf-43d2-961e-0c5e79390ca6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.142387] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Reconfiguring VM instance instance-00000074 to attach disk [datastore1] 84f7ae5d-abbd-4102-b4a9-5468e0edefc6/84f7ae5d-abbd-4102-b4a9-5468e0edefc6.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1121.143039] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c3f994d-dc78-4e61-b9cc-ae46116a9d3e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.167671] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962048, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.169126] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Waiting for the task: (returnval){ [ 1121.169126] env[68217]: value = "task-2962049" [ 1121.169126] env[68217]: _type = "Task" [ 1121.169126] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.178817] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962049, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.347435] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1121.347763] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1121.348127] env[68217]: DEBUG nova.objects.instance [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68217) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1121.386563] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 1faf45fb-a3b0-4647-b63d-3f51695b6171] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1121.668920] env[68217]: DEBUG oslo_vmware.api [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962048, 'name': PowerOnVM_Task, 'duration_secs': 0.704568} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.669218] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1121.669492] env[68217]: INFO nova.compute.manager [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Took 10.71 seconds to spawn the instance on the hypervisor. 
[ 1121.669702] env[68217]: DEBUG nova.compute.manager [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1121.670616] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74339829-63d1-467d-906d-00f948fe8763 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.684223] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962049, 'name': ReconfigVM_Task, 'duration_secs': 0.448261} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1121.686061] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Reconfigured VM instance instance-00000074 to attach disk [datastore1] 84f7ae5d-abbd-4102-b4a9-5468e0edefc6/84f7ae5d-abbd-4102-b4a9-5468e0edefc6.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1121.689342] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-927ef97b-8456-43b1-87ff-574a64b20cfa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1121.698806] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Waiting for the task: (returnval){ [ 1121.698806] env[68217]: value = "task-2962050" [ 1121.698806] env[68217]: _type = "Task" [ 1121.698806] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1121.707884] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962050, 'name': Rename_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1121.890582] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: ffff4cf4-f663-4965-84d1-8351bfde1252] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1122.085431] env[68217]: DEBUG nova.objects.instance [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lazy-loading 'flavor' on Instance uuid 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1122.202816] env[68217]: INFO nova.compute.manager [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Took 17.99 seconds to build instance. [ 1122.210952] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962050, 'name': Rename_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.358886] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d5f44ab0-01f6-4947-b978-ef2f6ea4a90b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.011s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.394107] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 33802025-7f72-4ad9-80fe-b15196b1a577] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1122.591437] env[68217]: DEBUG oslo_concurrency.lockutils [None req-475d20a2-873c-499e-9f02-794f8aaa07cc tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.305s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.705414] env[68217]: DEBUG oslo_concurrency.lockutils [None req-204432b4-c914-4590-8526-d209d21c55bf tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "1d8973e7-1da3-4c17-9516-007b2356854f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.506s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1122.710706] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962050, 'name': Rename_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1122.898442] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: df4c3a34-2dea-4f82-9ea6-7a9eb1c03179] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1123.211373] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962050, 'name': Rename_Task, 'duration_secs': 1.17631} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1123.211717] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1123.211854] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-178d2992-108e-4758-ba4d-b6dec4b62ac1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.220516] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Waiting for the task: (returnval){ [ 1123.220516] env[68217]: value = "task-2962051" [ 1123.220516] env[68217]: _type = "Task" [ 1123.220516] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.229698] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962051, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.316566] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "309aa787-aa7d-4508-bf90-499958747c46" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.316886] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "309aa787-aa7d-4508-bf90-499958747c46" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.317171] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "309aa787-aa7d-4508-bf90-499958747c46-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.317451] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "309aa787-aa7d-4508-bf90-499958747c46-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.317643] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "309aa787-aa7d-4508-bf90-499958747c46-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.319828] env[68217]: INFO nova.compute.manager [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Terminating instance [ 1123.387891] env[68217]: DEBUG oslo_concurrency.lockutils [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "1d8973e7-1da3-4c17-9516-007b2356854f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.388231] env[68217]: DEBUG oslo_concurrency.lockutils [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "1d8973e7-1da3-4c17-9516-007b2356854f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.388643] env[68217]: DEBUG oslo_concurrency.lockutils [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "1d8973e7-1da3-4c17-9516-007b2356854f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.388643] env[68217]: DEBUG oslo_concurrency.lockutils [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "1d8973e7-1da3-4c17-9516-007b2356854f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.388795] env[68217]: DEBUG oslo_concurrency.lockutils [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "1d8973e7-1da3-4c17-9516-007b2356854f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1123.391115] env[68217]: INFO nova.compute.manager [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Terminating instance [ 1123.400769] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 34f176e7-f98e-4eda-aee9-45e44d5ffb85] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1123.471871] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1123.472249] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1123.732393] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962051, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.824645] env[68217]: DEBUG nova.compute.manager [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1123.825242] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1123.826680] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-335f6218-7388-485e-9cc1-af6247795ba2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.836737] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1123.837059] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0673e5c2-1cc3-49c4-aade-312d21d3df5e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.844910] env[68217]: DEBUG oslo_vmware.api [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1123.844910] env[68217]: value = "task-2962052" [ 1123.844910] env[68217]: _type = "Task" [ 1123.844910] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.853810] env[68217]: DEBUG oslo_vmware.api [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962052, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.891781] env[68217]: DEBUG oslo_vmware.rw_handles [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520f2353-b41d-1ed3-e225-f251794b3122/disk-0.vmdk. 
{{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1123.892837] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5592696-b21d-4988-aa30-9de27bdb87c4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.897097] env[68217]: DEBUG nova.compute.manager [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1123.897519] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1123.898373] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35be6869-9f9a-45b4-8665-67ce7a331699 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.905829] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: d1fcac61-0d2a-4331-9042-af11c3c36ae4] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1123.907613] env[68217]: DEBUG oslo_vmware.rw_handles [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520f2353-b41d-1ed3-e225-f251794b3122/disk-0.vmdk is in state: ready. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1123.907769] env[68217]: ERROR oslo_vmware.rw_handles [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520f2353-b41d-1ed3-e225-f251794b3122/disk-0.vmdk due to incomplete transfer. 
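The entries around this point show oslo.vmware's task-polling loop at work: a vCenter "*_Task" method is invoked (PowerOffVM_Task, Destroy_Task, DeleteDatastoreFile_Task), the caller logs "Waiting for the task ... to complete", "_poll_task" reports "progress is N%", and the task is eventually logged as "completed successfully". For reference only, the sketch below is a minimal, hypothetical version of that polling pattern, not oslo.vmware's actual implementation; the helper callable get_task_info, the TaskFailed exception, and the chosen intervals are assumptions introduced purely for illustration.

```python
import time


class TaskFailed(Exception):
    """Raised when the polled task ends in an error state (hypothetical)."""


def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=300):
    """Poll a vCenter-style task until it succeeds, fails, or times out.

    ``get_task_info`` is a hypothetical callable returning a dict such as
    {'state': 'running', 'progress': 89} or {'state': 'success'} for the
    given task id (e.g. 'task-2962052' as seen in the log above).
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_id)
        state = info.get('state')
        if state == 'success':
            # corresponds to the "completed successfully" log lines
            return info
        if state == 'error':
            raise TaskFailed(info.get('error', 'unknown error'))
        # still queued/running: report progress and poll again,
        # mirroring the "progress is N%" DEBUG lines in the log
        print(f"Task {task_id} progress is {info.get('progress', 0)}%")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_id} did not complete within {timeout}s")
```

In the real service the poll interval and retry behaviour are configuration of the VMware API session rather than hard-coded values; the sketch only mirrors the observable running/progress/success states recorded in these log lines.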
[ 1123.910172] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8e40c000-acea-49d2-9871-378b0fd19e0b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.911937] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1123.912242] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-3aac2fa4-5902-44e8-bce9-7c807150a62f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.919838] env[68217]: DEBUG oslo_vmware.api [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1123.919838] env[68217]: value = "task-2962053" [ 1123.919838] env[68217]: _type = "Task" [ 1123.919838] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.926328] env[68217]: DEBUG oslo_vmware.rw_handles [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/520f2353-b41d-1ed3-e225-f251794b3122/disk-0.vmdk. {{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1123.926538] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Uploaded image 5f518e85-1779-448c-b381-424d3d5af7dd to the Glance image server {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1123.930289] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1123.935305] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-c2d18762-4097-4203-853b-4e0efebc2832 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1123.937918] env[68217]: DEBUG oslo_vmware.api [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962053, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.946077] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1123.946077] env[68217]: value = "task-2962054" [ 1123.946077] env[68217]: _type = "Task" [ 1123.946077] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1123.956502] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962054, 'name': Destroy_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1123.976978] env[68217]: DEBUG nova.compute.utils [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1124.231764] env[68217]: DEBUG oslo_vmware.api [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962051, 'name': PowerOnVM_Task, 'duration_secs': 0.603037} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.232252] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1124.232252] env[68217]: INFO nova.compute.manager [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Took 10.88 seconds to spawn the instance on the hypervisor. [ 1124.232408] env[68217]: DEBUG nova.compute.manager [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1124.233186] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2380b0ab-154e-4f4b-96c2-033f8eea5587 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.355241] env[68217]: DEBUG oslo_vmware.api [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962052, 'name': PowerOffVM_Task, 'duration_secs': 0.500287} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.355554] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1124.355719] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1124.355982] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e2da3934-ff62-46e2-84a4-0c0ff69a457a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.408797] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 66ca9efd-1839-4e98-b006-5fc3adda375d] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1124.433024] env[68217]: DEBUG oslo_vmware.api [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962053, 'name': PowerOffVM_Task, 'duration_secs': 0.448028} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.433962] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1124.434157] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1124.434427] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe948c20-60c3-41bf-b353-2badb5e42d3f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.445020] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1124.445387] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1124.445597] env[68217]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Deleting the datastore file [datastore2] 309aa787-aa7d-4508-bf90-499958747c46 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1124.445954] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b53f2858-b483-441c-b531-f3f37015722b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.457146] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962054, 'name': Destroy_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.458386] env[68217]: DEBUG oslo_vmware.api [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1124.458386] env[68217]: value = "task-2962057" [ 1124.458386] env[68217]: _type = "Task" [ 1124.458386] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.466515] env[68217]: DEBUG oslo_vmware.api [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962057, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.479696] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1124.507040] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1124.507435] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1124.507715] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Deleting the datastore file [datastore1] 1d8973e7-1da3-4c17-9516-007b2356854f {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1124.508094] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-084c5d35-51d0-411d-a6f5-59ae3fac7a42 
{{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1124.515603] env[68217]: DEBUG oslo_vmware.api [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for the task: (returnval){ [ 1124.515603] env[68217]: value = "task-2962058" [ 1124.515603] env[68217]: _type = "Task" [ 1124.515603] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1124.523832] env[68217]: DEBUG oslo_vmware.api [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962058, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.751994] env[68217]: INFO nova.compute.manager [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Took 20.16 seconds to build instance. [ 1124.911833] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 92f99a45-13a3-48d9-8dbc-4065cc8ee9dc] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1124.959828] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962054, 'name': Destroy_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1124.968937] env[68217]: DEBUG oslo_vmware.api [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962057, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.504809} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1124.969219] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1124.969407] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1124.969600] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1124.970032] env[68217]: INFO nova.compute.manager [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1124.970142] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1124.970393] env[68217]: DEBUG nova.compute.manager [-] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1124.970512] env[68217]: DEBUG nova.network.neutron [-] [instance: 309aa787-aa7d-4508-bf90-499958747c46] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1125.028303] env[68217]: DEBUG oslo_vmware.api [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Task: {'id': task-2962058, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.46089} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.028681] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1125.028946] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1125.029215] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1125.029480] env[68217]: INFO nova.compute.manager [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1125.029848] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1125.030180] env[68217]: DEBUG nova.compute.manager [-] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1125.030321] env[68217]: DEBUG nova.network.neutron [-] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1125.256365] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4eb2d7b8-c1e3-449f-9f2f-ac0f6f5a5438 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Lock "84f7ae5d-abbd-4102-b4a9-5468e0edefc6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.671s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1125.415170] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 35c4ab95-fc14-4bd4-a2a5-64f15f070b88] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1125.461073] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962054, 'name': Destroy_Task, 'duration_secs': 1.04832} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1125.461073] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Destroyed the VM [ 1125.461073] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1125.461232] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-8e14c419-2a4c-4cf5-9865-6681f1280f58 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.469284] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1125.469284] env[68217]: value = "task-2962059" [ 1125.469284] env[68217]: _type = "Task" [ 1125.469284] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1125.480614] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962059, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1125.553709] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.553935] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.554178] env[68217]: INFO nova.compute.manager [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Attaching volume e20c6f20-79fe-420d-b9df-0288702c53dc to /dev/sdc [ 1125.577036] env[68217]: DEBUG nova.compute.manager [req-54512eca-92c7-4ed4-8e8b-6605ac29f387 req-1406aec1-58a3-401a-b11b-3873c5df56eb service nova] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Received event network-vif-deleted-626716af-4590-4f7f-a441-03ca790d82d4 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1125.577353] env[68217]: INFO nova.compute.manager [req-54512eca-92c7-4ed4-8e8b-6605ac29f387 req-1406aec1-58a3-401a-b11b-3873c5df56eb service nova] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Neutron deleted interface 626716af-4590-4f7f-a441-03ca790d82d4; detaching it from the instance and deleting it from the info cache [ 1125.577507] env[68217]: DEBUG nova.network.neutron [req-54512eca-92c7-4ed4-8e8b-6605ac29f387 req-1406aec1-58a3-401a-b11b-3873c5df56eb service nova] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1125.586405] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4304d51-09b6-4921-84de-f080fc9ffa2a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.595133] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b2e61e3-7b11-4575-a53e-05ac8c83c2a4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1125.609702] env[68217]: DEBUG nova.virt.block_device [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Updating existing volume attachment record: d5e72b0e-8ed0-473c-a62f-8eee11d24fc2 {{(pid=68217) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1125.918761] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 105e6181-19c4-466b-88a0-cdbca2cac230] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11847}} [ 1125.938631] env[68217]: DEBUG oslo_concurrency.lockutils [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "interface-89136574-575c-47da-928c-bd7a5dbb3a98-96505674-7581-4b19-93ab-1b3fe17ed499" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1125.938914] env[68217]: DEBUG oslo_concurrency.lockutils [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-89136574-575c-47da-928c-bd7a5dbb3a98-96505674-7581-4b19-93ab-1b3fe17ed499" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1125.939343] env[68217]: DEBUG nova.objects.instance [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lazy-loading 'flavor' on Instance uuid 89136574-575c-47da-928c-bd7a5dbb3a98 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1125.980443] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962059, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.006621] env[68217]: DEBUG nova.network.neutron [-] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.042608] env[68217]: DEBUG nova.network.neutron [-] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1126.053793] env[68217]: DEBUG oslo_concurrency.lockutils [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Acquiring lock "84f7ae5d-abbd-4102-b4a9-5468e0edefc6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1126.054044] env[68217]: DEBUG oslo_concurrency.lockutils [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Lock "84f7ae5d-abbd-4102-b4a9-5468e0edefc6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.054267] env[68217]: DEBUG oslo_concurrency.lockutils [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Acquiring lock "84f7ae5d-abbd-4102-b4a9-5468e0edefc6-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1126.054455] env[68217]: DEBUG oslo_concurrency.lockutils [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Lock "84f7ae5d-abbd-4102-b4a9-5468e0edefc6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1126.055050] env[68217]: DEBUG oslo_concurrency.lockutils [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Lock "84f7ae5d-abbd-4102-b4a9-5468e0edefc6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1126.056788] env[68217]: INFO nova.compute.manager [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Terminating instance [ 1126.080407] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c2b0f577-7abb-437d-842d-e6af7dadad3c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.092194] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4043cd35-3bd4-45bb-abc4-7de41c6c920b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.135532] env[68217]: DEBUG nova.compute.manager [req-54512eca-92c7-4ed4-8e8b-6605ac29f387 req-1406aec1-58a3-401a-b11b-3873c5df56eb service nova] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Detach interface failed, port_id=626716af-4590-4f7f-a441-03ca790d82d4, reason: Instance 309aa787-aa7d-4508-bf90-499958747c46 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1126.422221] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 01f97d0d-df21-441c-9dc6-5c51e3798d81] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1126.481899] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962059, 'name': RemoveSnapshot_Task, 'duration_secs': 0.657426} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1126.482194] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1126.482287] env[68217]: DEBUG nova.compute.manager [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1126.483072] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89fbc2ad-ee66-42e3-8870-ca4d40c7356a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.509405] env[68217]: INFO nova.compute.manager [-] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Took 1.54 seconds to deallocate network for instance. [ 1126.545445] env[68217]: INFO nova.compute.manager [-] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Took 1.52 seconds to deallocate network for instance. [ 1126.560321] env[68217]: DEBUG nova.compute.manager [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1126.560434] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1126.561687] env[68217]: DEBUG nova.objects.instance [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lazy-loading 'pci_requests' on Instance uuid 89136574-575c-47da-928c-bd7a5dbb3a98 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1126.563410] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32d7bee7-7aeb-44d7-8e0d-4f3ae82ec023 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.572654] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1126.572909] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-58b04655-8690-438e-861c-9501c57df95d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1126.581107] env[68217]: DEBUG oslo_vmware.api [None 
req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Waiting for the task: (returnval){ [ 1126.581107] env[68217]: value = "task-2962061" [ 1126.581107] env[68217]: _type = "Task" [ 1126.581107] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1126.589251] env[68217]: DEBUG oslo_vmware.api [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962061, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1126.925933] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 776798bf-1ad4-4acb-ac58-cacc5493e1c7] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1126.997092] env[68217]: INFO nova.compute.manager [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Shelve offloading [ 1127.015665] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1127.015939] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1127.016171] env[68217]: DEBUG nova.objects.instance [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lazy-loading 'resources' on Instance uuid 309aa787-aa7d-4508-bf90-499958747c46 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1127.052028] env[68217]: DEBUG oslo_concurrency.lockutils [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1127.066945] env[68217]: DEBUG nova.objects.base [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Object Instance<89136574-575c-47da-928c-bd7a5dbb3a98> lazy-loaded attributes: flavor,pci_requests {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1127.067152] env[68217]: DEBUG nova.network.neutron [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 
tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1127.090904] env[68217]: DEBUG oslo_vmware.api [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962061, 'name': PowerOffVM_Task, 'duration_secs': 0.167192} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1127.091162] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1127.091330] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1127.091567] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cba2c21d-493c-4f95-8b98-14121aca3e87 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.128393] env[68217]: DEBUG nova.policy [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9172578aec2742bb9aafc58752b926c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef7e30ed571740f3b3ea6b24fc9c6e20', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1127.151111] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1127.151349] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1127.151534] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Deleting the datastore file [datastore1] 84f7ae5d-abbd-4102-b4a9-5468e0edefc6 {{(pid=68217) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1127.151802] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-645f4e2e-d292-4257-aa79-6d7b4d545e08 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.159092] env[68217]: DEBUG oslo_vmware.api [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Waiting for the task: (returnval){ [ 1127.159092] env[68217]: value = "task-2962063" [ 1127.159092] env[68217]: _type = "Task" [ 1127.159092] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.167164] env[68217]: DEBUG oslo_vmware.api [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962063, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.429841] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 4a555172-a2a3-410b-a0fe-38964cee9a22] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1127.500875] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1127.501175] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1ab16cb4-631e-48d8-95e5-52176b76e581 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.509105] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1127.509105] env[68217]: value = "task-2962064" [ 1127.509105] env[68217]: _type = "Task" [ 1127.509105] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1127.521205] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] VM already powered off {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1127.521621] env[68217]: DEBUG nova.compute.manager [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1127.522159] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-247023ef-d483-4296-ad78-ec4f8c5f2322 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.530900] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1127.530983] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquired lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1127.531205] env[68217]: DEBUG nova.network.neutron [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1127.599789] env[68217]: DEBUG nova.compute.manager [req-c9cec4b5-e7a2-4651-be43-086efc807f31 req-2bcb1bdc-50b6-49d3-af0e-7ff72b18c32c service nova] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Received event network-vif-deleted-3066fae5-f17e-4a05-8512-540ca8da5bcf {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1127.668717] env[68217]: DEBUG oslo_vmware.api [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962063, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1127.887349] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b58398f5-6437-43f9-b255-f1c932a5864e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.895468] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98454e7e-1c6a-4a95-8015-3426798c2071 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.924942] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ff875c7-c583-4db5-a372-06bb159b3cd8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.933057] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8ff58bb-7320-445c-b450-4f34f2e06943 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1127.937264] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 1f99ace3-1c5b-46ce-bb9c-74e139519da7] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1127.949050] env[68217]: DEBUG nova.compute.provider_tree [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1128.171099] env[68217]: DEBUG oslo_vmware.api [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Task: {'id': task-2962063, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.636814} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1128.171357] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1128.171538] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1128.171711] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1128.171881] env[68217]: INFO nova.compute.manager [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Took 1.61 seconds to destroy the instance on the hypervisor. [ 1128.172138] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1128.172324] env[68217]: DEBUG nova.compute.manager [-] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1128.172419] env[68217]: DEBUG nova.network.neutron [-] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1128.305723] env[68217]: DEBUG nova.network.neutron [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Updating instance_info_cache with network_info: [{"id": "c0f23ace-2be2-4dca-b47a-a5b77ba68dd3", "address": "fa:16:3e:b3:5d:10", "network": {"id": "b560d1ba-8945-443a-9586-083067363663", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-25035929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0522eaa6ebc48a28651f6b3bf1434f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0f23ace-2b", "ovs_interfaceid": "c0f23ace-2be2-4dca-b47a-a5b77ba68dd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.440854] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 04149a5c-d1b5-4d71-a1ca-44696506a40d] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1128.452416] env[68217]: DEBUG nova.scheduler.client.report [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1128.674291] env[68217]: DEBUG nova.network.neutron [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Successfully updated port: 
96505674-7581-4b19-93ab-1b3fe17ed499 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1128.808084] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Releasing lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1128.944109] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: fab7d1eb-ef05-4498-aa6d-a524c3bb59c8] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1128.958620] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.943s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1128.960964] env[68217]: DEBUG oslo_concurrency.lockutils [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.909s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1128.961230] env[68217]: DEBUG nova.objects.instance [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lazy-loading 'resources' on Instance uuid 1d8973e7-1da3-4c17-9516-007b2356854f {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1128.964244] env[68217]: DEBUG nova.network.neutron [-] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1128.984862] env[68217]: INFO nova.scheduler.client.report [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Deleted allocations for instance 309aa787-aa7d-4508-bf90-499958747c46 [ 1129.135168] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1129.136084] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d66eb63f-9e2a-4b2a-a36b-dc48dba68ec0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.144553] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1129.144806] env[68217]: 
DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b2ba6126-4dec-48f1-bd72-19f88200a6da {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.177068] env[68217]: DEBUG oslo_concurrency.lockutils [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.177350] env[68217]: DEBUG oslo_concurrency.lockutils [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1129.177557] env[68217]: DEBUG nova.network.neutron [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1129.210664] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1129.210926] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1129.211191] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Deleting the datastore file [datastore2] 815d1801-fa07-4466-850d-b1a36d630d46 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1129.211460] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c1db177d-2885-49bd-ae4e-0874accaff51 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.218537] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1129.218537] env[68217]: value = "task-2962067" [ 1129.218537] env[68217]: _type = "Task" [ 1129.218537] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1129.227460] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962067, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1129.448052] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 213b720b-b782-41c4-b60d-ef0af4b62932] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1129.469451] env[68217]: INFO nova.compute.manager [-] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Took 1.30 seconds to deallocate network for instance. [ 1129.492554] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5a815669-1344-480b-9c9f-f94e2a6b2d4d tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "309aa787-aa7d-4508-bf90-499958747c46" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.176s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.630286] env[68217]: DEBUG nova.compute.manager [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Received event network-vif-deleted-a0dbcb6d-8e89-4e49-a2d1-3dccf029a33a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1129.630494] env[68217]: DEBUG nova.compute.manager [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Received event network-vif-plugged-96505674-7581-4b19-93ab-1b3fe17ed499 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1129.630674] env[68217]: DEBUG oslo_concurrency.lockutils [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] Acquiring lock "89136574-575c-47da-928c-bd7a5dbb3a98-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1129.630869] env[68217]: DEBUG oslo_concurrency.lockutils [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] Lock "89136574-575c-47da-928c-bd7a5dbb3a98-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1129.631182] env[68217]: DEBUG oslo_concurrency.lockutils [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] Lock "89136574-575c-47da-928c-bd7a5dbb3a98-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1129.631457] env[68217]: DEBUG nova.compute.manager [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] No waiting events found dispatching network-vif-plugged-96505674-7581-4b19-93ab-1b3fe17ed499 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1129.631633] env[68217]: WARNING nova.compute.manager [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Received unexpected event 
network-vif-plugged-96505674-7581-4b19-93ab-1b3fe17ed499 for instance with vm_state active and task_state None. [ 1129.631792] env[68217]: DEBUG nova.compute.manager [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Received event network-changed-96505674-7581-4b19-93ab-1b3fe17ed499 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1129.631938] env[68217]: DEBUG nova.compute.manager [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Refreshing instance network info cache due to event network-changed-96505674-7581-4b19-93ab-1b3fe17ed499. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1129.632114] env[68217]: DEBUG oslo_concurrency.lockutils [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] Acquiring lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1129.634305] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f522f4a-9e97-495c-97ce-25f6263a7a68 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.643627] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b381292f-6870-4526-b834-2b9d9e80fa91 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.678453] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac996451-f227-4e43-a5d4-ce23d80caa8f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.687958] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf4f856-1418-4b44-8388-b9fce26ca169 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1129.701900] env[68217]: DEBUG nova.compute.provider_tree [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1129.728898] env[68217]: DEBUG oslo_vmware.api [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962067, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127117} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1129.729310] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1129.729357] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1129.729625] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1129.750392] env[68217]: INFO nova.scheduler.client.report [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Deleted allocations for instance 815d1801-fa07-4466-850d-b1a36d630d46 [ 1129.946361] env[68217]: WARNING nova.network.neutron [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] 246af4c9-69b4-4542-84b9-2afe67cf297a already exists in list: networks containing: ['246af4c9-69b4-4542-84b9-2afe67cf297a']. ignoring it [ 1129.950867] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 4366c94c-164d-4cb9-8f04-7f26db4c0d3c] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1129.976934] env[68217]: DEBUG oslo_concurrency.lockutils [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.153705] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Volume attach. 
Driver type: vmdk {{(pid=68217) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1130.154145] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594403', 'volume_id': 'e20c6f20-79fe-420d-b9df-0288702c53dc', 'name': 'volume-e20c6f20-79fe-420d-b9df-0288702c53dc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b', 'attached_at': '', 'detached_at': '', 'volume_id': 'e20c6f20-79fe-420d-b9df-0288702c53dc', 'serial': 'e20c6f20-79fe-420d-b9df-0288702c53dc'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1130.155059] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17ebf9e-d7bf-4279-adc6-7d5569e65841 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.174177] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab9ff008-b056-4145-a831-7a7423e8879c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.202286] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] volume-e20c6f20-79fe-420d-b9df-0288702c53dc/volume-e20c6f20-79fe-420d-b9df-0288702c53dc.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1130.205164] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-41dd7c95-2334-4dd0-9f4c-4c6e666033a0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.219928] env[68217]: DEBUG nova.scheduler.client.report [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1130.230111] env[68217]: DEBUG oslo_vmware.api [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1130.230111] env[68217]: value = "task-2962068" [ 1130.230111] env[68217]: _type = "Task" [ 1130.230111] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.240302] env[68217]: DEBUG oslo_vmware.api [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962068, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.257253] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1130.290635] env[68217]: DEBUG nova.network.neutron [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Updating instance_info_cache with network_info: [{"id": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "address": "fa:16:3e:41:6d:56", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap513d21ef-f0", "ovs_interfaceid": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "96505674-7581-4b19-93ab-1b3fe17ed499", "address": "fa:16:3e:be:61:78", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96505674-75", "ovs_interfaceid": "96505674-7581-4b19-93ab-1b3fe17ed499", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1130.454202] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 35817c87-0c55-49bd-917a-59bd39de663c] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1130.725475] env[68217]: DEBUG oslo_concurrency.lockutils [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.764s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1130.727885] env[68217]: DEBUG oslo_concurrency.lockutils [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.752s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1130.728134] env[68217]: DEBUG nova.objects.instance [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Lazy-loading 'resources' on Instance uuid 84f7ae5d-abbd-4102-b4a9-5468e0edefc6 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1130.741373] env[68217]: DEBUG oslo_vmware.api [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962068, 'name': ReconfigVM_Task, 'duration_secs': 0.396304} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1130.741757] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Reconfigured VM instance instance-0000006b to attach disk [datastore1] volume-e20c6f20-79fe-420d-b9df-0288702c53dc/volume-e20c6f20-79fe-420d-b9df-0288702c53dc.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1130.748403] env[68217]: INFO nova.scheduler.client.report [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Deleted allocations for instance 1d8973e7-1da3-4c17-9516-007b2356854f [ 1130.749912] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3c32b16-0099-4520-b8c7-57093316718b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.769848] env[68217]: DEBUG oslo_vmware.api [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1130.769848] env[68217]: value = "task-2962069" [ 1130.769848] env[68217]: _type = "Task" [ 1130.769848] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.781442] env[68217]: DEBUG oslo_vmware.api [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962069, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.793329] env[68217]: DEBUG oslo_concurrency.lockutils [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1130.794368] env[68217]: DEBUG oslo_concurrency.lockutils [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1130.794684] env[68217]: DEBUG oslo_concurrency.lockutils [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1130.795176] env[68217]: DEBUG oslo_concurrency.lockutils [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] Acquired lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1130.795483] env[68217]: DEBUG nova.network.neutron [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Refreshing network info cache for port 96505674-7581-4b19-93ab-1b3fe17ed499 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1130.797927] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a75b83b7-19af-4059-8e08-4f0b541d4e4f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.817874] env[68217]: DEBUG nova.virt.hardware [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1130.818152] env[68217]: DEBUG nova.virt.hardware [None req-341814fd-cedc-4489-b18f-4d89eec38a17 
tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1130.818283] env[68217]: DEBUG nova.virt.hardware [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1130.818485] env[68217]: DEBUG nova.virt.hardware [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1130.818650] env[68217]: DEBUG nova.virt.hardware [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1130.818812] env[68217]: DEBUG nova.virt.hardware [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1130.819052] env[68217]: DEBUG nova.virt.hardware [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1130.819234] env[68217]: DEBUG nova.virt.hardware [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1130.819421] env[68217]: DEBUG nova.virt.hardware [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1130.819622] env[68217]: DEBUG nova.virt.hardware [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1130.819819] env[68217]: DEBUG nova.virt.hardware [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1130.826234] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 
89136574-575c-47da-928c-bd7a5dbb3a98] Reconfiguring VM to attach interface {{(pid=68217) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1130.826788] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29bc4d88-69f5-4e63-a3de-bcfffb928f48 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1130.844645] env[68217]: DEBUG oslo_vmware.api [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1130.844645] env[68217]: value = "task-2962070" [ 1130.844645] env[68217]: _type = "Task" [ 1130.844645] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1130.852957] env[68217]: DEBUG oslo_vmware.api [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962070, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1130.957837] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 7371d4d3-e255-4a1f-8d5f-2ee1297e89d1] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1131.269038] env[68217]: DEBUG oslo_concurrency.lockutils [None req-95606ae6-d213-426e-bba4-fa6e8ff3611f tempest-MultipleCreateTestJSON-1339003622 tempest-MultipleCreateTestJSON-1339003622-project-member] Lock "1d8973e7-1da3-4c17-9516-007b2356854f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.881s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1131.281981] env[68217]: DEBUG oslo_vmware.api [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962069, 'name': ReconfigVM_Task, 'duration_secs': 0.143587} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.284710] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594403', 'volume_id': 'e20c6f20-79fe-420d-b9df-0288702c53dc', 'name': 'volume-e20c6f20-79fe-420d-b9df-0288702c53dc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b', 'attached_at': '', 'detached_at': '', 'volume_id': 'e20c6f20-79fe-420d-b9df-0288702c53dc', 'serial': 'e20c6f20-79fe-420d-b9df-0288702c53dc'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1131.357468] env[68217]: DEBUG oslo_vmware.api [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962070, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1131.442015] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-930e7e47-ddd0-4cee-b19f-3dfbaabfedeb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.455626] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47a0e276-9c6e-4b67-add8-3d56db6cf2b1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.461181] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 650ebd16-da81-475e-a82a-7fa5fb2880bc] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1131.516620] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792e217c-0567-4abb-904f-eceb346ac95b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.526195] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7294de5c-ab88-4387-8c56-4ca29440f937 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1131.541181] env[68217]: DEBUG nova.compute.provider_tree [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1131.592182] env[68217]: DEBUG nova.network.neutron [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Updated VIF entry in instance network info cache for port 96505674-7581-4b19-93ab-1b3fe17ed499. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1131.592619] env[68217]: DEBUG nova.network.neutron [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Updating instance_info_cache with network_info: [{"id": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "address": "fa:16:3e:41:6d:56", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap513d21ef-f0", "ovs_interfaceid": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "96505674-7581-4b19-93ab-1b3fe17ed499", "address": "fa:16:3e:be:61:78", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96505674-75", "ovs_interfaceid": "96505674-7581-4b19-93ab-1b3fe17ed499", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1131.856044] env[68217]: DEBUG oslo_vmware.api [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962070, 'name': ReconfigVM_Task, 'duration_secs': 0.623236} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1131.856622] env[68217]: DEBUG oslo_concurrency.lockutils [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1131.856834] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Reconfigured VM to attach interface {{(pid=68217) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1131.964912] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: a7625a02-993b-4577-8d42-f763858a6154] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1132.045911] env[68217]: DEBUG nova.scheduler.client.report [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1132.095257] env[68217]: DEBUG oslo_concurrency.lockutils [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] Releasing lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1132.095611] env[68217]: DEBUG nova.compute.manager [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Received event network-vif-unplugged-c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1132.095873] env[68217]: DEBUG oslo_concurrency.lockutils [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] Acquiring lock "815d1801-fa07-4466-850d-b1a36d630d46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1132.096156] env[68217]: DEBUG oslo_concurrency.lockutils [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] Lock "815d1801-fa07-4466-850d-b1a36d630d46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.096367] env[68217]: DEBUG oslo_concurrency.lockutils [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f 
service nova] Lock "815d1801-fa07-4466-850d-b1a36d630d46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.096577] env[68217]: DEBUG nova.compute.manager [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] No waiting events found dispatching network-vif-unplugged-c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1132.096793] env[68217]: WARNING nova.compute.manager [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Received unexpected event network-vif-unplugged-c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 for instance with vm_state shelved and task_state shelving_offloading. [ 1132.097014] env[68217]: DEBUG nova.compute.manager [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Received event network-changed-c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1132.097219] env[68217]: DEBUG nova.compute.manager [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Refreshing instance network info cache due to event network-changed-c0f23ace-2be2-4dca-b47a-a5b77ba68dd3. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1132.100523] env[68217]: DEBUG oslo_concurrency.lockutils [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] Acquiring lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1132.100523] env[68217]: DEBUG oslo_concurrency.lockutils [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] Acquired lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1132.100523] env[68217]: DEBUG nova.network.neutron [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Refreshing network info cache for port c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1132.361546] env[68217]: DEBUG oslo_concurrency.lockutils [None req-341814fd-cedc-4489-b18f-4d89eec38a17 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-89136574-575c-47da-928c-bd7a5dbb3a98-96505674-7581-4b19-93ab-1b3fe17ed499" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 6.422s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.410343] env[68217]: DEBUG nova.objects.instance [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lazy-loading 'flavor' on Instance uuid 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b 
{{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1132.468320] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: ca9ef7ff-b942-4363-a4f8-9163791ec162] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1132.551769] env[68217]: DEBUG oslo_concurrency.lockutils [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.824s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.554151] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.297s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1132.554397] env[68217]: DEBUG nova.objects.instance [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lazy-loading 'resources' on Instance uuid 815d1801-fa07-4466-850d-b1a36d630d46 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1132.583507] env[68217]: INFO nova.scheduler.client.report [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Deleted allocations for instance 84f7ae5d-abbd-4102-b4a9-5468e0edefc6 [ 1132.881077] env[68217]: DEBUG nova.network.neutron [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Updated VIF entry in instance network info cache for port c0f23ace-2be2-4dca-b47a-a5b77ba68dd3. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1132.881475] env[68217]: DEBUG nova.network.neutron [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Updating instance_info_cache with network_info: [{"id": "c0f23ace-2be2-4dca-b47a-a5b77ba68dd3", "address": "fa:16:3e:b3:5d:10", "network": {"id": "b560d1ba-8945-443a-9586-083067363663", "bridge": null, "label": "tempest-AttachVolumeShelveTestJSON-25035929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0522eaa6ebc48a28651f6b3bf1434f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tapc0f23ace-2b", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1132.916106] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4ed189b8-72eb-48cb-a71f-ad5a9ff434f6 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.362s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1132.972751] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: fe9cc3fa-fbf2-4d61-b4ba-8bedaa2943cb] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1133.057022] env[68217]: DEBUG nova.objects.instance [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lazy-loading 'numa_topology' on Instance uuid 815d1801-fa07-4466-850d-b1a36d630d46 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1133.094826] env[68217]: DEBUG oslo_concurrency.lockutils [None req-652dfb2a-6078-4af1-b437-94452b0c8059 tempest-ServersNegativeTestMultiTenantJSON-990935715 tempest-ServersNegativeTestMultiTenantJSON-990935715-project-member] Lock "84f7ae5d-abbd-4102-b4a9-5468e0edefc6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.041s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1133.170851] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.171164] env[68217]: DEBUG oslo_concurrency.lockutils [None 
req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.201174] env[68217]: DEBUG oslo_concurrency.lockutils [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "400058d8-f9ca-41b9-a671-b04b0511d074" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.201405] env[68217]: DEBUG oslo_concurrency.lockutils [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "400058d8-f9ca-41b9-a671-b04b0511d074" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.282603] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "815d1801-fa07-4466-850d-b1a36d630d46" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.384846] env[68217]: DEBUG oslo_concurrency.lockutils [req-f5ba80c3-46fa-4e3f-8b6a-8233e228ee3e req-4d17ffc8-0211-4901-b1ed-35817c0a137f service nova] Releasing lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1133.476055] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: a513976b-4859-4822-8989-c9452db62ee6] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1133.559394] env[68217]: DEBUG nova.objects.base [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Object Instance<815d1801-fa07-4466-850d-b1a36d630d46> lazy-loaded attributes: resources,numa_topology {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1133.677905] env[68217]: INFO nova.compute.manager [None req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Detaching volume a47bb4bc-3ddf-46eb-b753-4e8dcce58334 [ 1133.678808] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "interface-89136574-575c-47da-928c-bd7a5dbb3a98-96505674-7581-4b19-93ab-1b3fe17ed499" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1133.679093] 
env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-89136574-575c-47da-928c-bd7a5dbb3a98-96505674-7581-4b19-93ab-1b3fe17ed499" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1133.703810] env[68217]: DEBUG nova.compute.utils [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1133.713065] env[68217]: INFO nova.virt.block_device [None req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Attempting to driver detach volume a47bb4bc-3ddf-46eb-b753-4e8dcce58334 from mountpoint /dev/sdb [ 1133.713315] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Volume detach. Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1133.713503] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594399', 'volume_id': 'a47bb4bc-3ddf-46eb-b753-4e8dcce58334', 'name': 'volume-a47bb4bc-3ddf-46eb-b753-4e8dcce58334', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b', 'attached_at': '', 'detached_at': '', 'volume_id': 'a47bb4bc-3ddf-46eb-b753-4e8dcce58334', 'serial': 'a47bb4bc-3ddf-46eb-b753-4e8dcce58334'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1133.714389] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9c02c2d-b46e-42f6-88e3-71012b9e3332 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.721595] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609c8078-de2a-4907-ad1c-1e509ca18b59 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.743799] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4accf54d-26bb-4b25-b41f-ff824097996d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.749751] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f07863f9-58c3-45e4-81fb-3f378f84c366 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.755172] env[68217]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cf9932c-e46a-43fa-9c44-41da8d4a8686 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.782975] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9687fb-2221-4a28-85e2-94199381b877 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.805946] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f3540b-5bb1-4e4a-9cfe-6a10dcda2c0d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.811587] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a64e1334-b3d9-4064-a5ea-f64aa3fa06bb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.825289] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] The volume has not been displaced from its original location: [datastore1] volume-a47bb4bc-3ddf-46eb-b753-4e8dcce58334/volume-a47bb4bc-3ddf-46eb-b753-4e8dcce58334.vmdk. No consolidation needed. {{(pid=68217) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1133.830464] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Reconfiguring VM instance instance-0000006b to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1133.831085] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-010c107d-7074-407a-af61-1bf01ff6835b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.851603] env[68217]: DEBUG nova.compute.provider_tree [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1133.861018] env[68217]: DEBUG oslo_vmware.api [None req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1133.861018] env[68217]: value = "task-2962071" [ 1133.861018] env[68217]: _type = "Task" [ 1133.861018] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.870997] env[68217]: DEBUG oslo_vmware.api [None req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962071, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1133.979190] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 3af571ce-c400-45a1-97ad-4fbd53395129] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1134.184897] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1134.185241] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1134.186324] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe81e537-daa8-46e5-85d3-0da059bb94c7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.219457] env[68217]: DEBUG oslo_concurrency.lockutils [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "400058d8-f9ca-41b9-a671-b04b0511d074" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.018s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.221748] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a1ec268-d9a4-41e0-9fa6-34e3fdca1c65 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.253430] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Reconfiguring VM to detach interface {{(pid=68217) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1134.253737] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d9a73e1b-831d-4cef-9a5a-a1a6019b8f3a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.272970] env[68217]: DEBUG oslo_vmware.api [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1134.272970] env[68217]: value = "task-2962072" [ 1134.272970] env[68217]: _type = "Task" [ 1134.272970] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.280831] env[68217]: DEBUG oslo_vmware.api [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962072, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.355082] env[68217]: DEBUG nova.scheduler.client.report [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1134.368717] env[68217]: DEBUG oslo_vmware.api [None req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962071, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.482745] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 09a8469d-567c-4247-96eb-edf0f4040f65] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1134.786999] env[68217]: DEBUG oslo_vmware.api [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962072, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.861359] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.306s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1134.875040] env[68217]: DEBUG oslo_vmware.api [None req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962071, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.986135] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 62628aed-e2f9-478f-bed7-00757fc3c484] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1135.284318] env[68217]: DEBUG oslo_vmware.api [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962072, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.301817] env[68217]: DEBUG oslo_concurrency.lockutils [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "400058d8-f9ca-41b9-a671-b04b0511d074" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1135.302077] env[68217]: DEBUG oslo_concurrency.lockutils [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "400058d8-f9ca-41b9-a671-b04b0511d074" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.302309] env[68217]: INFO nova.compute.manager [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Attaching volume a82505dd-3944-4311-bbf1-39afd9d16e72 to /dev/sdb [ 1135.346292] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7de9fce0-9f40-4963-bc9f-4bd7a5751234 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.354043] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ee9c42-34ad-43ad-9205-0c58635b725b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.372194] env[68217]: DEBUG nova.virt.block_device [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating existing volume attachment record: 1bf02b65-7d36-4415-a8b3-9864134cb579 {{(pid=68217) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1135.378011] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4520c48a-c19e-4b22-b362-11d159afdb4f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "815d1801-fa07-4466-850d-b1a36d630d46" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 25.143s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1135.379072] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "815d1801-fa07-4466-850d-b1a36d630d46" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.097s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1135.380147] env[68217]: INFO nova.compute.manager [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Unshelving [ 1135.389186] env[68217]: DEBUG oslo_vmware.api [None 
req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962071, 'name': ReconfigVM_Task, 'duration_secs': 1.293427} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.391843] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Reconfigured VM instance instance-0000006b to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1135.397769] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d4bb0c07-e798-42b7-b33d-9f53e5b15c87 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.416590] env[68217]: DEBUG oslo_vmware.api [None req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1135.416590] env[68217]: value = "task-2962073" [ 1135.416590] env[68217]: _type = "Task" [ 1135.416590] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1135.427223] env[68217]: DEBUG oslo_vmware.api [None req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962073, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.490293] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 2612f6fc-a43f-4011-8a09-51088a49371a] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1135.786125] env[68217]: DEBUG oslo_vmware.api [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962072, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1135.928811] env[68217]: DEBUG oslo_vmware.api [None req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962073, 'name': ReconfigVM_Task, 'duration_secs': 0.135374} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1135.930016] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594399', 'volume_id': 'a47bb4bc-3ddf-46eb-b753-4e8dcce58334', 'name': 'volume-a47bb4bc-3ddf-46eb-b753-4e8dcce58334', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b', 'attached_at': '', 'detached_at': '', 'volume_id': 'a47bb4bc-3ddf-46eb-b753-4e8dcce58334', 'serial': 'a47bb4bc-3ddf-46eb-b753-4e8dcce58334'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1135.996009] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: d3468ec2-6548-400a-b247-a6ab1156cab5] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1136.285846] env[68217]: DEBUG oslo_vmware.api [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962072, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.406579] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1136.406858] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1136.407076] env[68217]: DEBUG nova.objects.instance [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lazy-loading 'pci_requests' on Instance uuid 815d1801-fa07-4466-850d-b1a36d630d46 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1136.474013] env[68217]: DEBUG nova.objects.instance [None req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lazy-loading 'flavor' on Instance uuid 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1136.501069] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 09290e60-7751-408e-9d6d-20e7cb61767b] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1136.787427] env[68217]: DEBUG oslo_vmware.api [None 
req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962072, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1136.910771] env[68217]: DEBUG nova.objects.instance [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lazy-loading 'numa_topology' on Instance uuid 815d1801-fa07-4466-850d-b1a36d630d46 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1137.003620] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: ef9ab68e-9cb7-4ad4-bea7-db648d8dd5fe] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1137.300315] env[68217]: DEBUG oslo_vmware.api [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962072, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.413529] env[68217]: INFO nova.compute.claims [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1137.480812] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ad70520-9c41-4f71-be4b-d016ed7e0eed tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.309s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1137.489670] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a61e9dfb-60a5-49d8-aa8a-b32083d27475 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1137.489942] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a61e9dfb-60a5-49d8-aa8a-b32083d27475 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1137.506223] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: aa4b9cc8-d0dc-4a0b-9eec-dceace695df9] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1137.789142] env[68217]: DEBUG oslo_vmware.api [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962072, 'name': ReconfigVM_Task} progress is 
14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1137.992318] env[68217]: INFO nova.compute.manager [None req-a61e9dfb-60a5-49d8-aa8a-b32083d27475 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Detaching volume e20c6f20-79fe-420d-b9df-0288702c53dc [ 1138.010584] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 95e625e9-a726-4c3c-be66-7b8ce93b5f8a] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1138.021138] env[68217]: INFO nova.virt.block_device [None req-a61e9dfb-60a5-49d8-aa8a-b32083d27475 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Attempting to driver detach volume e20c6f20-79fe-420d-b9df-0288702c53dc from mountpoint /dev/sdc [ 1138.021367] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a61e9dfb-60a5-49d8-aa8a-b32083d27475 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Volume detach. Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1138.021554] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a61e9dfb-60a5-49d8-aa8a-b32083d27475 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594403', 'volume_id': 'e20c6f20-79fe-420d-b9df-0288702c53dc', 'name': 'volume-e20c6f20-79fe-420d-b9df-0288702c53dc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b', 'attached_at': '', 'detached_at': '', 'volume_id': 'e20c6f20-79fe-420d-b9df-0288702c53dc', 'serial': 'e20c6f20-79fe-420d-b9df-0288702c53dc'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1138.022447] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cae5bd63-f042-4a59-82d6-1836efdcde3a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.044757] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-955b085e-4b68-4b0b-a33f-9f6a283079d8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.052368] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-813fb7fe-4452-4382-a603-099a295f67a5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.072452] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dfe0095-383b-4cb7-a63c-97db356af5f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.086781] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a61e9dfb-60a5-49d8-aa8a-b32083d27475 tempest-AttachVolumeTestJSON-1217616865 
tempest-AttachVolumeTestJSON-1217616865-project-member] The volume has not been displaced from its original location: [datastore1] volume-e20c6f20-79fe-420d-b9df-0288702c53dc/volume-e20c6f20-79fe-420d-b9df-0288702c53dc.vmdk. No consolidation needed. {{(pid=68217) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1138.091897] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a61e9dfb-60a5-49d8-aa8a-b32083d27475 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Reconfiguring VM instance instance-0000006b to detach disk 2002 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1138.092395] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6115ccea-0e2c-4d18-a4e0-b57548ab4812 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.109691] env[68217]: DEBUG oslo_vmware.api [None req-a61e9dfb-60a5-49d8-aa8a-b32083d27475 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1138.109691] env[68217]: value = "task-2962078" [ 1138.109691] env[68217]: _type = "Task" [ 1138.109691] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.117088] env[68217]: DEBUG oslo_vmware.api [None req-a61e9dfb-60a5-49d8-aa8a-b32083d27475 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962078, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.289654] env[68217]: DEBUG oslo_vmware.api [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962072, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.513825] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 149bd497-4ee6-4ca2-9d18-b276e773aedf] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1138.573501] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd202a12-9453-4ca1-9ec6-29134d055415 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.580619] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53e73caf-7e57-45f8-9db3-68ab394e8bc4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.609983] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d65edf8f-9d0c-45b6-acca-bc9e1ead1238 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.619617] env[68217]: DEBUG oslo_vmware.api [None req-a61e9dfb-60a5-49d8-aa8a-b32083d27475 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962078, 'name': ReconfigVM_Task, 'duration_secs': 0.199277} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1138.621275] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a61e9dfb-60a5-49d8-aa8a-b32083d27475 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Reconfigured VM instance instance-0000006b to detach disk 2002 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1138.625847] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7a42a56-1899-4e35-bc2f-35369ad358a3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.636161] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53cc4708-dae5-472e-923c-bc59ee028410 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1138.649274] env[68217]: DEBUG nova.compute.provider_tree [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1138.651381] env[68217]: DEBUG oslo_vmware.api [None req-a61e9dfb-60a5-49d8-aa8a-b32083d27475 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1138.651381] env[68217]: value = "task-2962079" [ 1138.651381] env[68217]: _type = "Task" [ 1138.651381] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1138.658492] env[68217]: DEBUG oslo_vmware.api [None req-a61e9dfb-60a5-49d8-aa8a-b32083d27475 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962079, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1138.789610] env[68217]: DEBUG oslo_vmware.api [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962072, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.017320] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 2e3dae16-dba3-4230-913d-7a5c3469e36e] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1139.153414] env[68217]: DEBUG nova.scheduler.client.report [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1139.167584] env[68217]: DEBUG oslo_vmware.api [None req-a61e9dfb-60a5-49d8-aa8a-b32083d27475 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962079, 'name': ReconfigVM_Task, 'duration_secs': 0.130779} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1139.167584] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a61e9dfb-60a5-49d8-aa8a-b32083d27475 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594403', 'volume_id': 'e20c6f20-79fe-420d-b9df-0288702c53dc', 'name': 'volume-e20c6f20-79fe-420d-b9df-0288702c53dc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b', 'attached_at': '', 'detached_at': '', 'volume_id': 'e20c6f20-79fe-420d-b9df-0288702c53dc', 'serial': 'e20c6f20-79fe-420d-b9df-0288702c53dc'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1139.291790] env[68217]: DEBUG oslo_vmware.api [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962072, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.521738] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: b7fe971e-353f-427c-896c-32f9de0d70e7] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1139.658496] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.251s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1139.687954] env[68217]: INFO nova.network.neutron [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Updating port c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1139.707752] env[68217]: DEBUG nova.objects.instance [None req-a61e9dfb-60a5-49d8-aa8a-b32083d27475 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lazy-loading 'flavor' on Instance uuid 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1139.791618] env[68217]: DEBUG oslo_vmware.api [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962072, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1139.932302] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Volume attach. 
Driver type: vmdk {{(pid=68217) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1139.932653] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594405', 'volume_id': 'a82505dd-3944-4311-bbf1-39afd9d16e72', 'name': 'volume-a82505dd-3944-4311-bbf1-39afd9d16e72', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '400058d8-f9ca-41b9-a671-b04b0511d074', 'attached_at': '', 'detached_at': '', 'volume_id': 'a82505dd-3944-4311-bbf1-39afd9d16e72', 'serial': 'a82505dd-3944-4311-bbf1-39afd9d16e72'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1139.933736] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dc9add1-cb06-4cdc-8fe0-25e97d0a4d4f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.949963] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c94a95-dc2d-4440-a74a-e244230b7ceb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.974394] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Reconfiguring VM instance instance-0000006f to attach disk [datastore2] volume-a82505dd-3944-4311-bbf1-39afd9d16e72/volume-a82505dd-3944-4311-bbf1-39afd9d16e72.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1139.974696] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a960185c-32a7-4062-a02b-9ce6ae01c998 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1139.993030] env[68217]: DEBUG oslo_vmware.api [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1139.993030] env[68217]: value = "task-2962080" [ 1139.993030] env[68217]: _type = "Task" [ 1139.993030] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.000369] env[68217]: DEBUG oslo_vmware.api [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962080, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.025083] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 23366029-e754-49dc-ba56-7a0d92232d81] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1140.291840] env[68217]: DEBUG oslo_vmware.api [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962072, 'name': ReconfigVM_Task} progress is 18%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.502642] env[68217]: DEBUG oslo_vmware.api [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962080, 'name': ReconfigVM_Task, 'duration_secs': 0.350667} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.503041] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Reconfigured VM instance instance-0000006f to attach disk [datastore2] volume-a82505dd-3944-4311-bbf1-39afd9d16e72/volume-a82505dd-3944-4311-bbf1-39afd9d16e72.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1140.508247] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-193e1afa-af57-4923-a7d4-60cda03242d7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.523782] env[68217]: DEBUG oslo_vmware.api [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1140.523782] env[68217]: value = "task-2962081" [ 1140.523782] env[68217]: _type = "Task" [ 1140.523782] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.531758] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: e642c93b-ca48-4d23-9abb-ff243855d8d0] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1140.533882] env[68217]: DEBUG oslo_vmware.api [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962081, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.714718] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a61e9dfb-60a5-49d8-aa8a-b32083d27475 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.225s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1140.791406] env[68217]: DEBUG oslo_vmware.api [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962072, 'name': ReconfigVM_Task, 'duration_secs': 6.508421} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.791641] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1140.791847] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Reconfigured VM to detach interface {{(pid=68217) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1141.034326] env[68217]: DEBUG oslo_vmware.api [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962081, 'name': ReconfigVM_Task, 'duration_secs': 0.128469} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.034658] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594405', 'volume_id': 'a82505dd-3944-4311-bbf1-39afd9d16e72', 'name': 'volume-a82505dd-3944-4311-bbf1-39afd9d16e72', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '400058d8-f9ca-41b9-a671-b04b0511d074', 'attached_at': '', 'detached_at': '', 'volume_id': 'a82505dd-3944-4311-bbf1-39afd9d16e72', 'serial': 'a82505dd-3944-4311-bbf1-39afd9d16e72'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1141.036738] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: ba39e563-3e3a-40aa-815f-760f0f37a55d] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1141.155139] env[68217]: DEBUG nova.compute.manager [req-82475de6-37b3-4d9c-bb25-a3934a4d686a req-fbb854c1-e9d0-4951-ba5a-39d1fe33d5f1 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Received event network-vif-plugged-c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1141.155388] env[68217]: DEBUG oslo_concurrency.lockutils [req-82475de6-37b3-4d9c-bb25-a3934a4d686a req-fbb854c1-e9d0-4951-ba5a-39d1fe33d5f1 service nova] Acquiring lock "815d1801-fa07-4466-850d-b1a36d630d46-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.155595] env[68217]: DEBUG oslo_concurrency.lockutils [req-82475de6-37b3-4d9c-bb25-a3934a4d686a req-fbb854c1-e9d0-4951-ba5a-39d1fe33d5f1 service nova] Lock "815d1801-fa07-4466-850d-b1a36d630d46-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.155758] env[68217]: DEBUG oslo_concurrency.lockutils [req-82475de6-37b3-4d9c-bb25-a3934a4d686a req-fbb854c1-e9d0-4951-ba5a-39d1fe33d5f1 service nova] Lock "815d1801-fa07-4466-850d-b1a36d630d46-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.155924] env[68217]: DEBUG nova.compute.manager [req-82475de6-37b3-4d9c-bb25-a3934a4d686a req-fbb854c1-e9d0-4951-ba5a-39d1fe33d5f1 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] No waiting events found dispatching network-vif-plugged-c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1141.156250] env[68217]: WARNING nova.compute.manager [req-82475de6-37b3-4d9c-bb25-a3934a4d686a req-fbb854c1-e9d0-4951-ba5a-39d1fe33d5f1 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Received unexpected event network-vif-plugged-c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 for instance with vm_state shelved_offloaded and 
task_state spawning. [ 1141.287444] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1141.287837] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquired lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1141.288376] env[68217]: DEBUG nova.network.neutron [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1141.540836] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 7a01c2c5-3108-4382-85c5-a5ea5e6e160c] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1141.889515] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.889760] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.890190] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1141.890692] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1141.890867] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b-events" 
"released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1141.892871] env[68217]: INFO nova.compute.manager [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Terminating instance [ 1142.046054] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 41d279f2-477b-44b2-9eb9-7b782c9c890f] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1142.072878] env[68217]: DEBUG nova.objects.instance [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lazy-loading 'flavor' on Instance uuid 400058d8-f9ca-41b9-a671-b04b0511d074 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1142.142066] env[68217]: DEBUG nova.network.neutron [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Updating instance_info_cache with network_info: [{"id": "c0f23ace-2be2-4dca-b47a-a5b77ba68dd3", "address": "fa:16:3e:b3:5d:10", "network": {"id": "b560d1ba-8945-443a-9586-083067363663", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-25035929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0522eaa6ebc48a28651f6b3bf1434f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0f23ace-2b", "ovs_interfaceid": "c0f23ace-2be2-4dca-b47a-a5b77ba68dd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.221354] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1142.221538] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 
1142.221716] env[68217]: DEBUG nova.network.neutron [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1142.398617] env[68217]: DEBUG nova.compute.manager [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1142.398930] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1142.399806] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c02e31d-272b-4473-b5b5-eba7322b08a3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.411268] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1142.411648] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-20f422ab-80b7-496b-862e-c95e6d53da60 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.418224] env[68217]: DEBUG oslo_vmware.api [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1142.418224] env[68217]: value = "task-2962082" [ 1142.418224] env[68217]: _type = "Task" [ 1142.418224] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.426239] env[68217]: DEBUG oslo_vmware.api [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962082, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.552412] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: bd62c682-24f2-4559-887a-03186409f699] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1142.578880] env[68217]: DEBUG oslo_concurrency.lockutils [None req-be5e5d88-55f7-4d14-8ca3-9d2e51c7c784 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "400058d8-f9ca-41b9-a671-b04b0511d074" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.277s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1142.645421] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Releasing lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1142.673149] env[68217]: DEBUG nova.virt.hardware [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='5c193eafd2621590ae829ff72e62c1d1',container_format='bare',created_at=2025-03-12T08:24:09Z,direct_url=,disk_format='vmdk',id=5f518e85-1779-448c-b381-424d3d5af7dd,min_disk=1,min_ram=0,name='tempest-AttachVolumeShelveTestJSON-server-866138804-shelved',owner='c0522eaa6ebc48a28651f6b3bf1434f3',properties=ImageMetaProps,protected=,size=31669248,status='active',tags=,updated_at=2025-03-12T08:24:23Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1142.673396] env[68217]: DEBUG nova.virt.hardware [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1142.673553] env[68217]: DEBUG nova.virt.hardware [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1142.673735] env[68217]: DEBUG nova.virt.hardware [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1142.673879] env[68217]: DEBUG nova.virt.hardware [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1142.674048] env[68217]: DEBUG nova.virt.hardware [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1142.674267] env[68217]: DEBUG nova.virt.hardware [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1142.674424] env[68217]: DEBUG nova.virt.hardware [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1142.674589] env[68217]: DEBUG nova.virt.hardware [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1142.674747] env[68217]: DEBUG nova.virt.hardware [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1142.674915] env[68217]: DEBUG nova.virt.hardware [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1142.675808] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16a853ba-89ae-40aa-b55d-647375db11e4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.683701] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09165e91-493d-44f9-9e37-4e39b3015018 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.696779] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b3:5d:10', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '62237242-7ce2-4664-a1c5-6783b516b507', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c0f23ace-2be2-4dca-b47a-a5b77ba68dd3', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1142.704360] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None 
req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1142.704623] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1142.704829] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-128212a9-31a3-4a0f-85c7-661d5e9cecd4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.727639] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1142.727639] env[68217]: value = "task-2962083" [ 1142.727639] env[68217]: _type = "Task" [ 1142.727639] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1142.737012] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962083, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1142.927576] env[68217]: DEBUG oslo_vmware.api [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962082, 'name': PowerOffVM_Task, 'duration_secs': 0.207808} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1142.927846] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1142.928033] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1142.928286] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-75bd7a5c-adda-4e7d-ace4-b9051732ea92 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1142.939814] env[68217]: INFO nova.network.neutron [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Port 96505674-7581-4b19-93ab-1b3fe17ed499 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
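The PowerOffVM_Task / UnregisterVM entries above follow oslo.vmware's invoke-and-poll pattern: the driver invokes a vCenter task method over SOAP and then polls the returned Task managed object until it reports completion, which is what produces the "Waiting for the task ... progress is 0% ... completed successfully" sequence. A minimal sketch of that pattern, assuming a reachable vCenter; the host, credentials and the 'vm-12345' managed-object id are placeholders rather than values from this log, and the VMwareAPISession keyword names are recalled from oslo.vmware and may differ between releases:

    from oslo_vmware import api as vmware_api
    from oslo_vmware import vim_util

    # Creating the session logs in to vCenter immediately (placeholder
    # endpoint and credentials; this will fail without a real vCenter).
    session = vmware_api.VMwareAPISession(
        host='vc.example.test',
        server_username='administrator@vsphere.local',
        server_password='secret',
        api_retry_count=2,
        task_poll_interval=0.5)

    # Build a managed object reference for the VM (hypothetical id), invoke
    # the task method, then poll the Task object until it succeeds or raises.
    vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')
    task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
    session.wait_for_task(task)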
[ 1142.940200] env[68217]: DEBUG nova.network.neutron [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Updating instance_info_cache with network_info: [{"id": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "address": "fa:16:3e:41:6d:56", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap513d21ef-f0", "ovs_interfaceid": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1142.993285] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1142.993489] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1142.993608] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Deleting the datastore file [datastore1] 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1142.993868] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-080c382d-4542-4ebe-a300-14bac5eadf09 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.000998] env[68217]: DEBUG oslo_vmware.api [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for the task: (returnval){ [ 1143.000998] env[68217]: value = "task-2962085" [ 1143.000998] env[68217]: _type = "Task" [ 1143.000998] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.009390] env[68217]: DEBUG oslo_vmware.api [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962085, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.055265] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: e550084b-84dd-4ae8-8667-2edb45b49e2b] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1143.192042] env[68217]: DEBUG nova.compute.manager [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Received event network-changed-c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1143.192257] env[68217]: DEBUG nova.compute.manager [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Refreshing instance network info cache due to event network-changed-c0f23ace-2be2-4dca-b47a-a5b77ba68dd3. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1143.192467] env[68217]: DEBUG oslo_concurrency.lockutils [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] Acquiring lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.192611] env[68217]: DEBUG oslo_concurrency.lockutils [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] Acquired lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.192768] env[68217]: DEBUG nova.network.neutron [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Refreshing network info cache for port c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1143.222851] env[68217]: DEBUG oslo_concurrency.lockutils [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "interface-6789dd7d-d042-4c29-a963-2b4b982d5b43-96505674-7581-4b19-93ab-1b3fe17ed499" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1143.223084] env[68217]: DEBUG oslo_concurrency.lockutils [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-6789dd7d-d042-4c29-a963-2b4b982d5b43-96505674-7581-4b19-93ab-1b3fe17ed499" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1143.223410] 
env[68217]: DEBUG nova.objects.instance [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lazy-loading 'flavor' on Instance uuid 6789dd7d-d042-4c29-a963-2b4b982d5b43 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1143.238902] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962083, 'name': CreateVM_Task, 'duration_secs': 0.370845} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.239056] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1143.239690] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5f518e85-1779-448c-b381-424d3d5af7dd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.239847] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5f518e85-1779-448c-b381-424d3d5af7dd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.240219] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/5f518e85-1779-448c-b381-424d3d5af7dd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1143.240460] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ea21d907-0297-4dcf-95e5-148331a89f78 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.245557] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1143.245557] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52eb1b6f-e926-84d2-f61a-3bcf0dcc5bff" [ 1143.245557] env[68217]: _type = "Task" [ 1143.245557] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.253493] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52eb1b6f-e926-84d2-f61a-3bcf0dcc5bff, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.442693] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.511012] env[68217]: DEBUG oslo_vmware.api [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Task: {'id': task-2962085, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.131718} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1143.511290] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1143.511475] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1143.511656] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1143.511830] env[68217]: INFO nova.compute.manager [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1143.512090] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1143.512292] env[68217]: DEBUG nova.compute.manager [-] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1143.512389] env[68217]: DEBUG nova.network.neutron [-] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1143.549296] env[68217]: DEBUG nova.compute.manager [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Stashing vm_state: active {{(pid=68217) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1143.558036] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: e9e05b6e-f7ee-4dc3-96fb-6b3e39a7b9a2] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1143.758422] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5f518e85-1779-448c-b381-424d3d5af7dd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1143.758794] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Processing image 5f518e85-1779-448c-b381-424d3d5af7dd {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1143.759164] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/5f518e85-1779-448c-b381-424d3d5af7dd/5f518e85-1779-448c-b381-424d3d5af7dd.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1143.759401] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquired lock "[datastore2] devstack-image-cache_base/5f518e85-1779-448c-b381-424d3d5af7dd/5f518e85-1779-448c-b381-424d3d5af7dd.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1143.759680] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1143.760034] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c8b86aed-cea9-43d1-8116-7ed832cd36dd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1143.769666] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1143.769916] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1143.770926] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69d43fbf-2fb7-47ae-a0b9-0da6c9535aa0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.776299] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1143.776299] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52108f51-47fb-c2cd-0a95-4e164b4cb08e" [ 1143.776299] env[68217]: _type = "Task" [ 1143.776299] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.783722] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52108f51-47fb-c2cd-0a95-4e164b4cb08e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1143.947282] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ffd8e164-32da-4d89-bff0-7bbd74f28511 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-89136574-575c-47da-928c-bd7a5dbb3a98-96505674-7581-4b19-93ab-1b3fe17ed499" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.268s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1144.060425] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 95dcd20c-b3d3-4b1d-a32e-41bef1c2e44f] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1144.071589] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1144.071876] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1144.240464] env[68217]: DEBUG nova.objects.instance [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lazy-loading 'pci_requests' on Instance uuid 6789dd7d-d042-4c29-a963-2b4b982d5b43 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1144.286694] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Preparing fetch location {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1144.286938] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Fetch image to [datastore2] OSTACK_IMG_b493f053-906a-4742-8d8c-b468b969f3e6/OSTACK_IMG_b493f053-906a-4742-8d8c-b468b969f3e6.vmdk {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1144.287153] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Downloading stream optimized image 5f518e85-1779-448c-b381-424d3d5af7dd to [datastore2] OSTACK_IMG_b493f053-906a-4742-8d8c-b468b969f3e6/OSTACK_IMG_b493f053-906a-4742-8d8c-b468b969f3e6.vmdk on the data store datastore2 as vApp {{(pid=68217) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1144.287292] 
env[68217]: DEBUG nova.virt.vmwareapi.images [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Downloading image file data 5f518e85-1779-448c-b381-424d3d5af7dd to the ESX as VM named 'OSTACK_IMG_b493f053-906a-4742-8d8c-b468b969f3e6' {{(pid=68217) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1144.361968] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1144.361968] env[68217]: value = "resgroup-9" [ 1144.361968] env[68217]: _type = "ResourcePool" [ 1144.361968] env[68217]: }. {{(pid=68217) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1144.362257] env[68217]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-88cc4f39-bcf0-4f55-b031-e17658405e98 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.380124] env[68217]: DEBUG nova.network.neutron [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Updated VIF entry in instance network info cache for port c0f23ace-2be2-4dca-b47a-a5b77ba68dd3. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1144.380475] env[68217]: DEBUG nova.network.neutron [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Updating instance_info_cache with network_info: [{"id": "c0f23ace-2be2-4dca-b47a-a5b77ba68dd3", "address": "fa:16:3e:b3:5d:10", "network": {"id": "b560d1ba-8945-443a-9586-083067363663", "bridge": "br-int", "label": "tempest-AttachVolumeShelveTestJSON-25035929-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.228", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c0522eaa6ebc48a28651f6b3bf1434f3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "62237242-7ce2-4664-a1c5-6783b516b507", "external-id": "nsx-vlan-transportzone-295", "segmentation_id": 295, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc0f23ace-2b", "ovs_interfaceid": "c0f23ace-2be2-4dca-b47a-a5b77ba68dd3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.387179] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lease: (returnval){ [ 1144.387179] env[68217]: value = 
"session[524e8b0f-828c-0303-9685-d9311f1dba7a]5208c4ca-f32b-7c85-b913-a649e47ef149" [ 1144.387179] env[68217]: _type = "HttpNfcLease" [ 1144.387179] env[68217]: } obtained for vApp import into resource pool (val){ [ 1144.387179] env[68217]: value = "resgroup-9" [ 1144.387179] env[68217]: _type = "ResourcePool" [ 1144.387179] env[68217]: }. {{(pid=68217) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1144.387457] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the lease: (returnval){ [ 1144.387457] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5208c4ca-f32b-7c85-b913-a649e47ef149" [ 1144.387457] env[68217]: _type = "HttpNfcLease" [ 1144.387457] env[68217]: } to be ready. {{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1144.394147] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1144.394147] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5208c4ca-f32b-7c85-b913-a649e47ef149" [ 1144.394147] env[68217]: _type = "HttpNfcLease" [ 1144.394147] env[68217]: } is initializing. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1144.563474] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 7584180b-efa6-4038-9f3a-619ab7937553] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1144.576222] env[68217]: INFO nova.compute.claims [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1144.743493] env[68217]: DEBUG nova.objects.base [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Object Instance<6789dd7d-d042-4c29-a963-2b4b982d5b43> lazy-loaded attributes: flavor,pci_requests {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1144.743770] env[68217]: DEBUG nova.network.neutron [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1144.817655] env[68217]: DEBUG nova.policy [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9172578aec2742bb9aafc58752b926c3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ef7e30ed571740f3b3ea6b24fc9c6e20', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1144.883663] env[68217]: DEBUG oslo_concurrency.lockutils 
[req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] Releasing lock "refresh_cache-815d1801-fa07-4466-850d-b1a36d630d46" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1144.883946] env[68217]: DEBUG nova.compute.manager [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Received event network-changed-513d21ef-f0b3-47f7-96ae-f01c23ac3ef1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1144.884127] env[68217]: DEBUG nova.compute.manager [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Refreshing instance network info cache due to event network-changed-513d21ef-f0b3-47f7-96ae-f01c23ac3ef1. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1144.884342] env[68217]: DEBUG oslo_concurrency.lockutils [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] Acquiring lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1144.884488] env[68217]: DEBUG oslo_concurrency.lockutils [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] Acquired lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1144.884650] env[68217]: DEBUG nova.network.neutron [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Refreshing network info cache for port 513d21ef-f0b3-47f7-96ae-f01c23ac3ef1 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1144.895323] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1144.895323] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5208c4ca-f32b-7c85-b913-a649e47ef149" [ 1144.895323] env[68217]: _type = "HttpNfcLease" [ 1144.895323] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1144.895615] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1144.895615] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5208c4ca-f32b-7c85-b913-a649e47ef149" [ 1144.895615] env[68217]: _type = "HttpNfcLease" [ 1144.895615] env[68217]: }. 
{{(pid=68217) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1144.896409] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1fa5d03-37c5-4723-bd55-a3822d1c969e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.904276] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527bb0ed-b7aa-1e43-0964-62cde288a0e0/disk-0.vmdk from lease info. {{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1144.904453] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Creating HTTP connection to write to file with size = 31669248 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527bb0ed-b7aa-1e43-0964-62cde288a0e0/disk-0.vmdk. {{(pid=68217) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1144.965026] env[68217]: DEBUG nova.network.neutron [-] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1144.971740] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-2cd2a932-1f1d-4d2f-848f-cf3b579d5eae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.068386] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: d14026b1-84dd-430e-be94-94dcb1f47473] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1145.083632] env[68217]: INFO nova.compute.resource_tracker [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating resource usage from migration e3c97425-57b9-4c9a-b7c7-d35f321e6398 [ 1145.225384] env[68217]: DEBUG nova.compute.manager [req-4d5c6c8a-5c83-48c0-aa6b-dda444d93148 req-80b67d01-df68-465d-a97d-8678952c0093 service nova] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Received event network-vif-deleted-9414aa68-f0b4-452d-a959-86e8f0acf53e {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1145.308089] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d5c1a3c-687e-4588-a6bb-ec5ff250bad7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.318248] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0ede7e9-39fe-4558-883a-1ac7a91ccc31 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.355330] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-6e69c900-0975-43f1-8094-5de8bb2a66ff {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.365962] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-013820d9-c11c-46a0-a847-d4328b7fd70f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.382502] env[68217]: DEBUG nova.compute.provider_tree [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1145.468126] env[68217]: INFO nova.compute.manager [-] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Took 1.96 seconds to deallocate network for instance. [ 1145.570959] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 0552d616-a406-4dfa-8a70-82f39fb98bbc] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1145.588750] env[68217]: DEBUG nova.network.neutron [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Updated VIF entry in instance network info cache for port 513d21ef-f0b3-47f7-96ae-f01c23ac3ef1. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1145.589136] env[68217]: DEBUG nova.network.neutron [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Updating instance_info_cache with network_info: [{"id": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "address": "fa:16:3e:41:6d:56", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap513d21ef-f0", "ovs_interfaceid": "513d21ef-f0b3-47f7-96ae-f01c23ac3ef1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.887960] env[68217]: DEBUG nova.scheduler.client.report [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 
16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1145.976752] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1146.073885] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: d0d8ed27-003e-43e2-8a07-041420a2c758] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1146.085457] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Completed reading data from the image iterator. {{(pid=68217) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1146.085677] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527bb0ed-b7aa-1e43-0964-62cde288a0e0/disk-0.vmdk. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1146.086807] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc105a1a-6d29-46f9-ab5d-a68578c956cf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.091429] env[68217]: DEBUG oslo_concurrency.lockutils [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] Releasing lock "refresh_cache-89136574-575c-47da-928c-bd7a5dbb3a98" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1146.091768] env[68217]: DEBUG nova.compute.manager [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Received event network-changed-3ee5d607-0533-4e05-9447-4840b4e48cdd {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1146.091895] env[68217]: DEBUG nova.compute.manager [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Refreshing instance network info cache due to event network-changed-3ee5d607-0533-4e05-9447-4840b4e48cdd. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1146.092119] env[68217]: DEBUG oslo_concurrency.lockutils [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] Acquiring lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.092364] env[68217]: DEBUG oslo_concurrency.lockutils [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] Acquired lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1146.092661] env[68217]: DEBUG nova.network.neutron [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Refreshing network info cache for port 3ee5d607-0533-4e05-9447-4840b4e48cdd {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1146.095454] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527bb0ed-b7aa-1e43-0964-62cde288a0e0/disk-0.vmdk is in state: ready. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1146.095632] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527bb0ed-b7aa-1e43-0964-62cde288a0e0/disk-0.vmdk. 
{{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1146.096177] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-3d940582-ba1f-47a2-8a7a-85c679316d91 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.349194] env[68217]: DEBUG nova.network.neutron [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Successfully updated port: 96505674-7581-4b19-93ab-1b3fe17ed499 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1146.393123] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.321s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1146.393336] env[68217]: INFO nova.compute.manager [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Migrating [ 1146.399955] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.423s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1146.400214] env[68217]: DEBUG nova.objects.instance [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lazy-loading 'resources' on Instance uuid 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1146.578910] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 580e6909-7d05-447a-a378-f0b8b71f059a] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1146.711640] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/527bb0ed-b7aa-1e43-0964-62cde288a0e0/disk-0.vmdk. 
{{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1146.712012] env[68217]: INFO nova.virt.vmwareapi.images [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Downloaded image file data 5f518e85-1779-448c-b381-424d3d5af7dd [ 1146.713260] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35230c8f-301b-43db-870e-4547512e7b7a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.734291] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8042e61d-3713-4a0b-80ab-752426243832 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.799779] env[68217]: INFO nova.virt.vmwareapi.images [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] The imported VM was unregistered [ 1146.802491] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Caching image {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1146.802732] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Creating directory with path [datastore2] devstack-image-cache_base/5f518e85-1779-448c-b381-424d3d5af7dd {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1146.803048] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fa375c41-e4ea-4ac7-a87b-2ceff564f1d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.834561] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Created directory with path [datastore2] devstack-image-cache_base/5f518e85-1779-448c-b381-424d3d5af7dd {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1146.834757] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_b493f053-906a-4742-8d8c-b468b969f3e6/OSTACK_IMG_b493f053-906a-4742-8d8c-b468b969f3e6.vmdk to [datastore2] devstack-image-cache_base/5f518e85-1779-448c-b381-424d3d5af7dd/5f518e85-1779-448c-b381-424d3d5af7dd.vmdk. 
{{(pid=68217) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1146.835027] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-d0941d03-48dd-4dcf-abb2-f075e362233a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1146.842045] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1146.842045] env[68217]: value = "task-2962088" [ 1146.842045] env[68217]: _type = "Task" [ 1146.842045] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1146.851557] env[68217]: DEBUG oslo_concurrency.lockutils [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.851919] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962088, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1146.878716] env[68217]: DEBUG nova.network.neutron [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Updated VIF entry in instance network info cache for port 3ee5d607-0533-4e05-9447-4840b4e48cdd. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1146.879095] env[68217]: DEBUG nova.network.neutron [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Updating instance_info_cache with network_info: [{"id": "3ee5d607-0533-4e05-9447-4840b4e48cdd", "address": "fa:16:3e:40:c3:41", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ee5d607-05", "ovs_interfaceid": "3ee5d607-0533-4e05-9447-4840b4e48cdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1146.911063] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1146.911315] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1146.911663] env[68217]: DEBUG nova.network.neutron [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1147.082798] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: da1524a7-2756-4429-ada2-b1f493544bd2] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1147.100721] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3defe6e-16a4-4e77-910f-88db639897a8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.108659] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0624303f-32a0-43fe-9097-83c0cdc031c0 
{{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.140963] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2a2ad15-250d-4ec2-bdd8-5ff2f768ef21 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.149385] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92916402-3288-4e16-aacb-4559b5f85dba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.164700] env[68217]: DEBUG nova.compute.provider_tree [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1147.250943] env[68217]: DEBUG nova.compute.manager [req-43d53619-70d1-40c4-993e-a7b1d36476dd req-72dc2fc8-08ac-484f-88f2-fc66e1b361e9 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Received event network-vif-plugged-96505674-7581-4b19-93ab-1b3fe17ed499 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1147.251102] env[68217]: DEBUG oslo_concurrency.lockutils [req-43d53619-70d1-40c4-993e-a7b1d36476dd req-72dc2fc8-08ac-484f-88f2-fc66e1b361e9 service nova] Acquiring lock "6789dd7d-d042-4c29-a963-2b4b982d5b43-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1147.251299] env[68217]: DEBUG oslo_concurrency.lockutils [req-43d53619-70d1-40c4-993e-a7b1d36476dd req-72dc2fc8-08ac-484f-88f2-fc66e1b361e9 service nova] Lock "6789dd7d-d042-4c29-a963-2b4b982d5b43-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1147.251424] env[68217]: DEBUG oslo_concurrency.lockutils [req-43d53619-70d1-40c4-993e-a7b1d36476dd req-72dc2fc8-08ac-484f-88f2-fc66e1b361e9 service nova] Lock "6789dd7d-d042-4c29-a963-2b4b982d5b43-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1147.251616] env[68217]: DEBUG nova.compute.manager [req-43d53619-70d1-40c4-993e-a7b1d36476dd req-72dc2fc8-08ac-484f-88f2-fc66e1b361e9 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] No waiting events found dispatching network-vif-plugged-96505674-7581-4b19-93ab-1b3fe17ed499 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1147.251769] env[68217]: WARNING nova.compute.manager [req-43d53619-70d1-40c4-993e-a7b1d36476dd req-72dc2fc8-08ac-484f-88f2-fc66e1b361e9 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Received unexpected event network-vif-plugged-96505674-7581-4b19-93ab-1b3fe17ed499 for instance with vm_state active and task_state None. 
[ 1147.251892] env[68217]: DEBUG nova.compute.manager [req-43d53619-70d1-40c4-993e-a7b1d36476dd req-72dc2fc8-08ac-484f-88f2-fc66e1b361e9 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Received event network-changed-96505674-7581-4b19-93ab-1b3fe17ed499 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1147.252058] env[68217]: DEBUG nova.compute.manager [req-43d53619-70d1-40c4-993e-a7b1d36476dd req-72dc2fc8-08ac-484f-88f2-fc66e1b361e9 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Refreshing instance network info cache due to event network-changed-96505674-7581-4b19-93ab-1b3fe17ed499. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1147.252225] env[68217]: DEBUG oslo_concurrency.lockutils [req-43d53619-70d1-40c4-993e-a7b1d36476dd req-72dc2fc8-08ac-484f-88f2-fc66e1b361e9 service nova] Acquiring lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1148.042261] env[68217]: DEBUG oslo_concurrency.lockutils [req-88652b04-ef81-4301-b4a3-56c803ef679f req-f0d68436-5517-4bd0-bf9d-e95d5890a279 service nova] Releasing lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1148.045078] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: b5e15801-301a-4ee6-87d2-bbf749967631] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1148.048033] env[68217]: DEBUG nova.scheduler.client.report [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1148.053952] env[68217]: DEBUG oslo_concurrency.lockutils [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1148.056029] env[68217]: DEBUG nova.network.neutron [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1148.064153] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962088, 'name': MoveVirtualDisk_Task} progress is 35%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.339186] env[68217]: DEBUG nova.network.neutron [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating instance_info_cache with network_info: [{"id": "189406da-f39b-4370-b43d-945cbb45afb2", "address": "fa:16:3e:fa:ff:e7", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap189406da-f3", "ovs_interfaceid": "189406da-f39b-4370-b43d-945cbb45afb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1148.553082] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 58c15727-79ae-404f-a054-d71e3be498cc] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1148.554952] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962088, 'name': MoveVirtualDisk_Task} progress is 57%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1148.556967] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.157s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1148.579152] env[68217]: INFO nova.scheduler.client.report [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Deleted allocations for instance 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b [ 1148.597163] env[68217]: WARNING nova.network.neutron [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] 246af4c9-69b4-4542-84b9-2afe67cf297a already exists in list: networks containing: ['246af4c9-69b4-4542-84b9-2afe67cf297a']. 
ignoring it [ 1148.842707] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1148.884282] env[68217]: DEBUG nova.network.neutron [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Updating instance_info_cache with network_info: [{"id": "3ee5d607-0533-4e05-9447-4840b4e48cdd", "address": "fa:16:3e:40:c3:41", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ee5d607-05", "ovs_interfaceid": "3ee5d607-0533-4e05-9447-4840b4e48cdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "96505674-7581-4b19-93ab-1b3fe17ed499", "address": "fa:16:3e:be:61:78", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96505674-75", "ovs_interfaceid": "96505674-7581-4b19-93ab-1b3fe17ed499", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1149.054611] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962088, 'name': MoveVirtualDisk_Task} progress is 80%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.091540] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a78a15aa-16c5-4d95-a24c-6922b74a7be1 tempest-AttachVolumeTestJSON-1217616865 tempest-AttachVolumeTestJSON-1217616865-project-member] Lock "2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.201s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1149.388059] env[68217]: DEBUG oslo_concurrency.lockutils [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1149.388273] env[68217]: DEBUG oslo_concurrency.lockutils [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1149.388484] env[68217]: DEBUG oslo_concurrency.lockutils [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.388787] env[68217]: DEBUG oslo_concurrency.lockutils [req-43d53619-70d1-40c4-993e-a7b1d36476dd req-72dc2fc8-08ac-484f-88f2-fc66e1b361e9 service nova] Acquired lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1149.389017] env[68217]: DEBUG nova.network.neutron [req-43d53619-70d1-40c4-993e-a7b1d36476dd req-72dc2fc8-08ac-484f-88f2-fc66e1b361e9 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Refreshing network info cache for port 96505674-7581-4b19-93ab-1b3fe17ed499 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1149.391410] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06801d79-a609-49ea-acab-513be9364101 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.413325] env[68217]: DEBUG nova.virt.hardware [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1149.413644] env[68217]: DEBUG 
nova.virt.hardware [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1149.413818] env[68217]: DEBUG nova.virt.hardware [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1149.413998] env[68217]: DEBUG nova.virt.hardware [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1149.414158] env[68217]: DEBUG nova.virt.hardware [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1149.414300] env[68217]: DEBUG nova.virt.hardware [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1149.414507] env[68217]: DEBUG nova.virt.hardware [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1149.414671] env[68217]: DEBUG nova.virt.hardware [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1149.414831] env[68217]: DEBUG nova.virt.hardware [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1149.414990] env[68217]: DEBUG nova.virt.hardware [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1149.415172] env[68217]: DEBUG nova.virt.hardware [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1149.421566] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 
tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Reconfiguring VM to attach interface {{(pid=68217) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 1149.422428] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8f94730-52a4-4097-8f42-b31b6b76d87a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.440680] env[68217]: DEBUG oslo_vmware.api [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1149.440680] env[68217]: value = "task-2962089" [ 1149.440680] env[68217]: _type = "Task" [ 1149.440680] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.448838] env[68217]: DEBUG oslo_vmware.api [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962089, 'name': ReconfigVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.552549] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962088, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.693874} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1149.552832] env[68217]: INFO nova.virt.vmwareapi.ds_util [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_b493f053-906a-4742-8d8c-b468b969f3e6/OSTACK_IMG_b493f053-906a-4742-8d8c-b468b969f3e6.vmdk to [datastore2] devstack-image-cache_base/5f518e85-1779-448c-b381-424d3d5af7dd/5f518e85-1779-448c-b381-424d3d5af7dd.vmdk. 
[ 1149.553039] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Cleaning up location [datastore2] OSTACK_IMG_b493f053-906a-4742-8d8c-b468b969f3e6 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1149.553187] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_b493f053-906a-4742-8d8c-b468b969f3e6 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1149.553479] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7d25db40-979b-437d-857e-142e3d91e409 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.559432] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1149.559432] env[68217]: value = "task-2962090" [ 1149.559432] env[68217]: _type = "Task" [ 1149.559432] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1149.566464] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962090, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1149.708308] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "6b4dff91-254e-43cc-85cf-7de6214dcafd" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1149.708573] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "6b4dff91-254e-43cc-85cf-7de6214dcafd" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1149.951719] env[68217]: DEBUG oslo_vmware.api [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962089, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.067883] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962090, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.038554} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.068209] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1150.068302] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Releasing lock "[datastore2] devstack-image-cache_base/5f518e85-1779-448c-b381-424d3d5af7dd/5f518e85-1779-448c-b381-424d3d5af7dd.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.068581] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/5f518e85-1779-448c-b381-424d3d5af7dd/5f518e85-1779-448c-b381-424d3d5af7dd.vmdk to [datastore2] 815d1801-fa07-4466-850d-b1a36d630d46/815d1801-fa07-4466-850d-b1a36d630d46.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1150.068819] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7890086e-f2f2-4aa5-b3a1-0b98391e035e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.076296] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1150.076296] env[68217]: value = "task-2962092" [ 1150.076296] env[68217]: _type = "Task" [ 1150.076296] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.083667] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962092, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.101293] env[68217]: DEBUG nova.network.neutron [req-43d53619-70d1-40c4-993e-a7b1d36476dd req-72dc2fc8-08ac-484f-88f2-fc66e1b361e9 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Updated VIF entry in instance network info cache for port 96505674-7581-4b19-93ab-1b3fe17ed499. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1150.101700] env[68217]: DEBUG nova.network.neutron [req-43d53619-70d1-40c4-993e-a7b1d36476dd req-72dc2fc8-08ac-484f-88f2-fc66e1b361e9 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Updating instance_info_cache with network_info: [{"id": "3ee5d607-0533-4e05-9447-4840b4e48cdd", "address": "fa:16:3e:40:c3:41", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ee5d607-05", "ovs_interfaceid": "3ee5d607-0533-4e05-9447-4840b4e48cdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "96505674-7581-4b19-93ab-1b3fe17ed499", "address": "fa:16:3e:be:61:78", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap96505674-75", "ovs_interfaceid": "96505674-7581-4b19-93ab-1b3fe17ed499", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1150.211565] env[68217]: INFO nova.compute.manager [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Detaching volume a6379fd6-1468-4eff-945c-6ffe74897b05 [ 1150.243603] env[68217]: INFO nova.virt.block_device [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Attempting to driver detach volume a6379fd6-1468-4eff-945c-6ffe74897b05 from mountpoint /dev/sdb [ 
1150.243873] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Volume detach. Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1150.244070] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594389', 'volume_id': 'a6379fd6-1468-4eff-945c-6ffe74897b05', 'name': 'volume-a6379fd6-1468-4eff-945c-6ffe74897b05', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6b4dff91-254e-43cc-85cf-7de6214dcafd', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6379fd6-1468-4eff-945c-6ffe74897b05', 'serial': 'a6379fd6-1468-4eff-945c-6ffe74897b05'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1150.245212] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0114f17c-01d2-4c46-b82a-fc3acdc2996e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.266407] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d685a22-8fe3-4cf3-a20b-e3a4b3215631 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.274520] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd1c0599-46ac-43df-98cd-14896c74c139 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.293985] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b588a76-13b1-47bb-be59-c920eed64151 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.308536] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] The volume has not been displaced from its original location: [datastore2] volume-a6379fd6-1468-4eff-945c-6ffe74897b05/volume-a6379fd6-1468-4eff-945c-6ffe74897b05.vmdk. No consolidation needed. 
{{(pid=68217) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1150.313688] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Reconfiguring VM instance instance-00000067 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1150.313969] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-52696303-b1f1-46c0-830f-63e01172a3f0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.331711] env[68217]: DEBUG oslo_vmware.api [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1150.331711] env[68217]: value = "task-2962093" [ 1150.331711] env[68217]: _type = "Task" [ 1150.331711] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.339827] env[68217]: DEBUG oslo_vmware.api [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962093, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.358049] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f338ed60-93f0-40de-b6d1-3683e35cbe47 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.379098] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating instance '400058d8-f9ca-41b9-a671-b04b0511d074' progress to 0 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1150.451615] env[68217]: DEBUG oslo_vmware.api [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962089, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.586104] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962092, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.604772] env[68217]: DEBUG oslo_concurrency.lockutils [req-43d53619-70d1-40c4-993e-a7b1d36476dd req-72dc2fc8-08ac-484f-88f2-fc66e1b361e9 service nova] Releasing lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1150.841813] env[68217]: DEBUG oslo_vmware.api [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962093, 'name': ReconfigVM_Task, 'duration_secs': 0.23241} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1150.842109] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Reconfigured VM instance instance-00000067 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1150.846944] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0e968af7-0f8c-4399-8b38-1877ba95b173 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.862173] env[68217]: DEBUG oslo_vmware.api [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1150.862173] env[68217]: value = "task-2962094" [ 1150.862173] env[68217]: _type = "Task" [ 1150.862173] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.870429] env[68217]: DEBUG oslo_vmware.api [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962094, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.885281] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1150.885550] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e0a952be-6218-434c-8df1-6555abd3ee8f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.890802] env[68217]: DEBUG oslo_vmware.api [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1150.890802] env[68217]: value = "task-2962095" [ 1150.890802] env[68217]: _type = "Task" [ 1150.890802] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.898241] env[68217]: DEBUG oslo_vmware.api [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962095, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.951719] env[68217]: DEBUG oslo_vmware.api [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962089, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.088158] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962092, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.377540] env[68217]: DEBUG oslo_vmware.api [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962094, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.404281] env[68217]: DEBUG oslo_vmware.api [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962095, 'name': PowerOffVM_Task, 'duration_secs': 0.503244} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.404281] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1151.404281] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating instance '400058d8-f9ca-41b9-a671-b04b0511d074' progress to 17 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1151.453701] env[68217]: DEBUG oslo_vmware.api [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962089, 'name': ReconfigVM_Task, 'duration_secs': 1.756614} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.454270] env[68217]: DEBUG oslo_concurrency.lockutils [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1151.454470] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Reconfigured VM to attach interface {{(pid=68217) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 1151.546859] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1151.591735] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962092, 'name': CopyVirtualDisk_Task} progress is 15%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.874099] env[68217]: DEBUG oslo_vmware.api [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962094, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.910537] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1151.910767] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1151.911020] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1151.911275] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1151.911427] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1151.911579] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1151.911789] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1151.911945] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1151.912124] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 
tempest-ServerActionsTestOtherB-1476378927-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1151.912287] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1151.912453] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1151.917621] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c89f5cb4-0638-45fe-9cc0-e4d8e72f1b47 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.936433] env[68217]: DEBUG oslo_vmware.api [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1151.936433] env[68217]: value = "task-2962096" [ 1151.936433] env[68217]: _type = "Task" [ 1151.936433] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.946298] env[68217]: DEBUG oslo_vmware.api [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962096, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.958860] env[68217]: DEBUG oslo_concurrency.lockutils [None req-921fb7d7-2791-4dfc-884f-380cc819a5cb tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-6789dd7d-d042-4c29-a963-2b4b982d5b43-96505674-7581-4b19-93ab-1b3fe17ed499" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.736s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1152.091087] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962092, 'name': CopyVirtualDisk_Task} progress is 38%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.374935] env[68217]: DEBUG oslo_vmware.api [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962094, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.400700] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "759149be-178f-4238-b9c3-c316d060d6be" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1152.400936] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "759149be-178f-4238-b9c3-c316d060d6be" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1152.448345] env[68217]: DEBUG oslo_vmware.api [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962096, 'name': ReconfigVM_Task, 'duration_secs': 0.345928} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.448626] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating instance '400058d8-f9ca-41b9-a671-b04b0511d074' progress to 33 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1152.592129] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962092, 'name': CopyVirtualDisk_Task} progress is 63%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1152.876723] env[68217]: DEBUG oslo_vmware.api [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962094, 'name': ReconfigVM_Task, 'duration_secs': 1.824839} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1152.877052] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594389', 'volume_id': 'a6379fd6-1468-4eff-945c-6ffe74897b05', 'name': 'volume-a6379fd6-1468-4eff-945c-6ffe74897b05', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '6b4dff91-254e-43cc-85cf-7de6214dcafd', 'attached_at': '', 'detached_at': '', 'volume_id': 'a6379fd6-1468-4eff-945c-6ffe74897b05', 'serial': 'a6379fd6-1468-4eff-945c-6ffe74897b05'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1152.904311] env[68217]: DEBUG nova.compute.utils [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1152.955337] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1152.955744] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1152.955999] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1152.956221] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1152.956373] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1152.956522] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 
tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1152.956731] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1152.956885] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1152.957060] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1152.957226] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1152.957421] env[68217]: DEBUG nova.virt.hardware [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1152.963313] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Reconfiguring VM instance instance-0000006f to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1152.963709] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f02eb60d-302a-42a4-950c-c9702dab61ff {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.984641] env[68217]: DEBUG oslo_vmware.api [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1152.984641] env[68217]: value = "task-2962098" [ 1152.984641] env[68217]: _type = "Task" [ 1152.984641] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1152.996114] env[68217]: DEBUG oslo_vmware.api [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962098, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.092445] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962092, 'name': CopyVirtualDisk_Task} progress is 83%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.408158] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "759149be-178f-4238-b9c3-c316d060d6be" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1153.421599] env[68217]: DEBUG nova.objects.instance [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lazy-loading 'flavor' on Instance uuid 6b4dff91-254e-43cc-85cf-7de6214dcafd {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1153.493824] env[68217]: DEBUG oslo_vmware.api [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962098, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.591473] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962092, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.390396} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.591735] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/5f518e85-1779-448c-b381-424d3d5af7dd/5f518e85-1779-448c-b381-424d3d5af7dd.vmdk to [datastore2] 815d1801-fa07-4466-850d-b1a36d630d46/815d1801-fa07-4466-850d-b1a36d630d46.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1153.592588] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e486427-6a62-45f2-9fb7-25d136ff10ea {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.614611] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Reconfiguring VM instance instance-00000069 to attach disk [datastore2] 815d1801-fa07-4466-850d-b1a36d630d46/815d1801-fa07-4466-850d-b1a36d630d46.vmdk or device None with type streamOptimized {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1153.614864] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2ce59e46-b7d5-4675-9efe-f691cf004fdf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.632884] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1153.632884] env[68217]: value = "task-2962099" [ 1153.632884] env[68217]: _type = "Task" [ 1153.632884] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.640190] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962099, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1153.839502] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "interface-6789dd7d-d042-4c29-a963-2b4b982d5b43-96505674-7581-4b19-93ab-1b3fe17ed499" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1153.839799] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-6789dd7d-d042-4c29-a963-2b4b982d5b43-96505674-7581-4b19-93ab-1b3fe17ed499" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1153.998196] env[68217]: DEBUG oslo_vmware.api [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962098, 'name': ReconfigVM_Task, 'duration_secs': 0.5227} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1153.998611] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Reconfigured VM instance instance-0000006f to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1153.999573] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72f9537d-10a5-4e3a-bbbf-d10492ff6752 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.028868] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 400058d8-f9ca-41b9-a671-b04b0511d074/400058d8-f9ca-41b9-a671-b04b0511d074.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1154.028868] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1284a6b5-5dfb-4679-ade2-5cb9f756ab6c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.047364] env[68217]: DEBUG oslo_vmware.api [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1154.047364] env[68217]: value = "task-2962100" [ 1154.047364] env[68217]: _type = "Task" [ 1154.047364] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.054585] env[68217]: DEBUG oslo_vmware.api [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962100, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.144329] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962099, 'name': ReconfigVM_Task, 'duration_secs': 0.258312} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.144611] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Reconfigured VM instance instance-00000069 to attach disk [datastore2] 815d1801-fa07-4466-850d-b1a36d630d46/815d1801-fa07-4466-850d-b1a36d630d46.vmdk or device None with type streamOptimized {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1154.145264] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ef0df903-1fe0-4469-966c-1f352ee1eb67 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.151477] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1154.151477] env[68217]: value = "task-2962101" [ 1154.151477] env[68217]: _type = "Task" [ 1154.151477] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.159139] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962101, 'name': Rename_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.343310] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1154.343310] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1154.343555] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3241c0-d36a-418b-9bd0-464d05aa4b5e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.361107] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e9a5904-46f9-40b9-aae7-fc9217380fe1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.387328] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Reconfiguring VM to detach interface {{(pid=68217) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 1154.387667] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6bb46a8f-ec5a-468d-8045-a59feb27b771 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.408023] env[68217]: DEBUG oslo_vmware.api [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1154.408023] env[68217]: value = "task-2962102" [ 1154.408023] env[68217]: _type = "Task" [ 1154.408023] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.418506] env[68217]: DEBUG oslo_vmware.api [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962102, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.429543] env[68217]: DEBUG oslo_concurrency.lockutils [None req-79ce5f01-09c6-4323-b58a-2c51cfc1384d tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "6b4dff91-254e-43cc-85cf-7de6214dcafd" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.721s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.480315] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "759149be-178f-4238-b9c3-c316d060d6be" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.480577] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "759149be-178f-4238-b9c3-c316d060d6be" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.480864] env[68217]: INFO nova.compute.manager [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Attaching volume 60cd88df-fdc0-444d-9f98-a6b1725c8350 to /dev/sdb [ 1154.516341] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-561e5537-a7c7-4451-ae2f-c56a563d9217 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.524054] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6b6d2ec-ded3-4eda-ab55-a7dcfc8fa8e8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.537564] env[68217]: DEBUG nova.virt.block_device [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Updating existing volume attachment record: cb9e316f-6b06-4ca0-9e48-d6bdd7506adf {{(pid=68217) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1154.555277] env[68217]: DEBUG oslo_vmware.api [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962100, 'name': ReconfigVM_Task, 'duration_secs': 0.344668} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.555561] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 400058d8-f9ca-41b9-a671-b04b0511d074/400058d8-f9ca-41b9-a671-b04b0511d074.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1154.555938] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating instance '400058d8-f9ca-41b9-a671-b04b0511d074' progress to 50 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1154.659861] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962101, 'name': Rename_Task, 'duration_secs': 0.142099} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.660139] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1154.660378] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c97e1825-c69d-4d9b-a777-f15523d45e1a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.666715] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1154.666715] env[68217]: value = "task-2962103" [ 1154.666715] env[68217]: _type = "Task" [ 1154.666715] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.673756] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962103, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.918206] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "6b4dff91-254e-43cc-85cf-7de6214dcafd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.918431] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "6b4dff91-254e-43cc-85cf-7de6214dcafd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.918701] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "6b4dff91-254e-43cc-85cf-7de6214dcafd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1154.918915] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "6b4dff91-254e-43cc-85cf-7de6214dcafd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1154.919128] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "6b4dff91-254e-43cc-85cf-7de6214dcafd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1154.920861] env[68217]: DEBUG oslo_vmware.api [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962102, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.921755] env[68217]: INFO nova.compute.manager [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Terminating instance [ 1155.057325] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "9844e40f-29ed-48b9-a48f-85fbe10ae2fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1155.057601] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "9844e40f-29ed-48b9-a48f-85fbe10ae2fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1155.061563] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aac93b4-b288-4d0d-ba6d-f1a0de9195af {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.084006] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a275bddd-3c0d-4d72-8539-f69638e60dbd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.105973] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating instance '400058d8-f9ca-41b9-a671-b04b0511d074' progress to 67 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1155.176936] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962103, 'name': PowerOnVM_Task} progress is 100%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.417834] env[68217]: DEBUG oslo_vmware.api [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962102, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.425606] env[68217]: DEBUG nova.compute.manager [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1155.425815] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1155.426616] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98af22f9-35c7-4b26-a5ef-52deca06c805 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.434661] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1155.434966] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cd4b9d47-9843-4444-ab45-d2ea777da20d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.441388] env[68217]: DEBUG oslo_vmware.api [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1155.441388] env[68217]: value = "task-2962105" [ 1155.441388] env[68217]: _type = "Task" [ 1155.441388] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1155.449376] env[68217]: DEBUG oslo_vmware.api [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962105, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.560086] env[68217]: DEBUG nova.compute.manager [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1155.676578] env[68217]: DEBUG oslo_vmware.api [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962103, 'name': PowerOnVM_Task, 'duration_secs': 0.518778} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.676845] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1155.794150] env[68217]: DEBUG nova.compute.manager [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1155.795125] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc809a44-939d-4d95-aa8c-6506c98c3183 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.917762] env[68217]: DEBUG oslo_vmware.api [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962102, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1155.951371] env[68217]: DEBUG oslo_vmware.api [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962105, 'name': PowerOffVM_Task, 'duration_secs': 0.20252} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1155.951566] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1155.951924] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1155.952198] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f1bf7a34-2265-40da-833c-4eed94e59e77 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.011256] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1156.011472] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Deleting contents of the VM from datastore 
datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1156.011653] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Deleting the datastore file [datastore2] 6b4dff91-254e-43cc-85cf-7de6214dcafd {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1156.012022] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-829fee80-d562-4e84-a55b-f9245a6e62ee {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.018009] env[68217]: DEBUG oslo_vmware.api [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1156.018009] env[68217]: value = "task-2962107" [ 1156.018009] env[68217]: _type = "Task" [ 1156.018009] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1156.025735] env[68217]: DEBUG oslo_vmware.api [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962107, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.081638] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1156.081926] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1156.083421] env[68217]: INFO nova.compute.claims [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1156.312016] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e4a90c64-7d25-4ef9-a0c6-5452a0576f6a tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "815d1801-fa07-4466-850d-b1a36d630d46" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 20.933s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1156.419789] env[68217]: DEBUG oslo_vmware.api [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962102, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1156.527781] env[68217]: DEBUG oslo_vmware.api [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962107, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.367462} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1156.528046] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1156.528217] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1156.528395] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1156.528593] env[68217]: INFO nova.compute.manager [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1156.528851] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1156.529067] env[68217]: DEBUG nova.compute.manager [-] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1156.529165] env[68217]: DEBUG nova.network.neutron [-] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1156.787861] env[68217]: DEBUG nova.network.neutron [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Port 189406da-f39b-4370-b43d-945cbb45afb2 binding to destination host cpu-1 is already ACTIVE {{(pid=68217) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1156.919691] env[68217]: DEBUG oslo_vmware.api [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962102, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.021346] env[68217]: DEBUG nova.compute.manager [req-2475ceba-6452-452c-afcf-b4c2378ab947 req-55e47577-8a17-47a3-8eb5-f40833ec2f56 service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Received event network-vif-deleted-753c250a-569f-42f8-a9e7-fed02079c841 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1157.021346] env[68217]: INFO nova.compute.manager [req-2475ceba-6452-452c-afcf-b4c2378ab947 req-55e47577-8a17-47a3-8eb5-f40833ec2f56 service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Neutron deleted interface 753c250a-569f-42f8-a9e7-fed02079c841; detaching it from the instance and deleting it from the info cache [ 1157.021535] env[68217]: DEBUG nova.network.neutron [req-2475ceba-6452-452c-afcf-b4c2378ab947 req-55e47577-8a17-47a3-8eb5-f40833ec2f56 service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.256262] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec2f526b-f755-4717-aae1-300453442482 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.264735] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12fdfc93-5344-4967-a49d-a2473eb507b4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.301163] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78c7044c-42e8-4156-8762-a93a034ba841 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.308561] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5b8c675-c88f-41ae-9af1-fef67757b274 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.323700] env[68217]: DEBUG nova.compute.provider_tree [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1157.420155] env[68217]: DEBUG oslo_vmware.api [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962102, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1157.499711] env[68217]: DEBUG nova.network.neutron [-] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.524844] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f20659c6-a9cc-4a67-a408-cd9962e7243c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.535025] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d812e44-66ed-4464-9557-a26fa3a3ca8e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.566431] env[68217]: DEBUG nova.compute.manager [req-2475ceba-6452-452c-afcf-b4c2378ab947 req-55e47577-8a17-47a3-8eb5-f40833ec2f56 service nova] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Detach interface failed, port_id=753c250a-569f-42f8-a9e7-fed02079c841, reason: Instance 6b4dff91-254e-43cc-85cf-7de6214dcafd could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1157.816699] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "400058d8-f9ca-41b9-a671-b04b0511d074-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1157.816907] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "400058d8-f9ca-41b9-a671-b04b0511d074-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1157.817094] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "400058d8-f9ca-41b9-a671-b04b0511d074-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1157.828402] env[68217]: DEBUG nova.scheduler.client.report [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1157.920643] env[68217]: DEBUG oslo_vmware.api [None 
req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962102, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.004476] env[68217]: INFO nova.compute.manager [-] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Took 1.48 seconds to deallocate network for instance. [ 1158.258914] env[68217]: DEBUG oslo_concurrency.lockutils [None req-491d1a5a-9f69-48d5-9f4a-441ac6fa7771 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.259134] env[68217]: DEBUG oslo_concurrency.lockutils [None req-491d1a5a-9f69-48d5-9f4a-441ac6fa7771 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.332542] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.250s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1158.332780] env[68217]: DEBUG nova.compute.manager [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1158.421130] env[68217]: DEBUG oslo_vmware.api [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962102, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.511183] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1158.511428] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1158.511658] env[68217]: DEBUG nova.objects.instance [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lazy-loading 'resources' on Instance uuid 6b4dff91-254e-43cc-85cf-7de6214dcafd {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1158.762293] env[68217]: INFO nova.compute.manager [None req-491d1a5a-9f69-48d5-9f4a-441ac6fa7771 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Detaching volume 1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d [ 1158.791543] env[68217]: INFO nova.virt.block_device [None req-491d1a5a-9f69-48d5-9f4a-441ac6fa7771 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Attempting to driver detach volume 1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d from mountpoint /dev/sdb [ 1158.791777] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-491d1a5a-9f69-48d5-9f4a-441ac6fa7771 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Volume detach. 
Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1158.791958] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-491d1a5a-9f69-48d5-9f4a-441ac6fa7771 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594391', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'name': 'volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a4dcc7fb-83e4-4bb9-9c98-9569daee1435', 'attached_at': '', 'detached_at': '', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'serial': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1158.792849] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91e4e2bf-448b-4196-81d3-b480ffd1372f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.813880] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e45577f-ffd9-48e5-94c5-b926708b8a4d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.820626] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e87df9ac-8f58-449b-a41e-bd8573cf033a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.843417] env[68217]: DEBUG nova.compute.utils [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1158.844815] env[68217]: DEBUG nova.compute.manager [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1158.844984] env[68217]: DEBUG nova.network.neutron [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1158.847295] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9799a342-48c8-4912-9e41-b6de9f1f670a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.852400] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.852567] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1158.852733] env[68217]: DEBUG nova.network.neutron [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1158.865105] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-491d1a5a-9f69-48d5-9f4a-441ac6fa7771 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] The volume has not been displaced from its original location: [datastore2] volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d/volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d.vmdk. No consolidation needed. {{(pid=68217) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1158.870426] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-491d1a5a-9f69-48d5-9f4a-441ac6fa7771 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Reconfiguring VM instance instance-00000068 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1158.873034] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-aa7592c6-df65-493a-a5e4-31b017029080 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.892849] env[68217]: DEBUG oslo_vmware.api [None req-491d1a5a-9f69-48d5-9f4a-441ac6fa7771 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1158.892849] env[68217]: value = "task-2962110" [ 1158.892849] env[68217]: _type = "Task" [ 1158.892849] env[68217]: } to complete. 
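[editor's note] The "Reconfiguring VM instance instance-00000068 to detach disk 2001" step above, together with the VirtualMachine.ReconfigVM_Task invocation and the task wait that follows, corresponds to a reconfigure spec that removes one virtual disk device. Below is a rough sketch of such a spec, assuming a VMwareAPISession plus pre-resolved vm_ref and disk_device arguments (all assumptions, not objects from this log); the later volume-attach entries are the same call with operation 'add'.

```python
# Hedged sketch: remove one virtual disk from a VM with ReconfigVM_Task.
# `session` is an oslo.vmware VMwareAPISession; `vm_ref` and `disk_device`
# are assumed to have been looked up beforehand.
def detach_virtual_disk(session, vm_ref, disk_device):
    """Build a device-removal config spec and run ReconfigVM_Task (sketch)."""
    factory = session.vim.client.factory
    config_spec = factory.create('ns0:VirtualMachineConfigSpec')
    device_change = factory.create('ns0:VirtualDeviceConfigSpec')
    device_change.operation = 'remove'      # 'add' for the attach case
    device_change.device = disk_device      # the VirtualDisk backing the volume
    config_spec.deviceChange = [device_change]

    task = session.invoke_api(session.vim, 'ReconfigVM_Task', vm_ref,
                              spec=config_spec)
    # Polled by the caller as "ReconfigVM_Task progress is N%" in this log.
    session.wait_for_task(task)
```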
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.900693] env[68217]: DEBUG oslo_vmware.api [None req-491d1a5a-9f69-48d5-9f4a-441ac6fa7771 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962110, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.902061] env[68217]: DEBUG nova.policy [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c867c8ebcaeb49ec91f751e2be5349b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '46f4c8c2f4764bd1b995396126b6aaf3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1158.920517] env[68217]: DEBUG oslo_vmware.api [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962102, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.089652] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Volume attach. 
Driver type: vmdk {{(pid=68217) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1159.089954] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594408', 'volume_id': '60cd88df-fdc0-444d-9f98-a6b1725c8350', 'name': 'volume-60cd88df-fdc0-444d-9f98-a6b1725c8350', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '759149be-178f-4238-b9c3-c316d060d6be', 'attached_at': '', 'detached_at': '', 'volume_id': '60cd88df-fdc0-444d-9f98-a6b1725c8350', 'serial': '60cd88df-fdc0-444d-9f98-a6b1725c8350'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1159.090779] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-460a9630-6198-4836-88ac-a584093dcc66 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.109437] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286ac9ae-dc42-4762-819e-0c420bce5975 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.134518] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Reconfiguring VM instance instance-00000071 to attach disk [datastore1] volume-60cd88df-fdc0-444d-9f98-a6b1725c8350/volume-60cd88df-fdc0-444d-9f98-a6b1725c8350.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1159.137127] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-09b75eb4-cd98-440f-aa6e-7def013affee {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.157803] env[68217]: DEBUG oslo_vmware.api [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1159.157803] env[68217]: value = "task-2962111" [ 1159.157803] env[68217]: _type = "Task" [ 1159.157803] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.167034] env[68217]: DEBUG oslo_vmware.api [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962111, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.253170] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2318b76-f8e9-4d47-9a65-b4c87d6929fa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.258246] env[68217]: DEBUG nova.network.neutron [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating instance_info_cache with network_info: [{"id": "189406da-f39b-4370-b43d-945cbb45afb2", "address": "fa:16:3e:fa:ff:e7", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap189406da-f3", "ovs_interfaceid": "189406da-f39b-4370-b43d-945cbb45afb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1159.262635] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a24a6055-1c52-4db4-8cf0-390b6ba9fe1e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.294861] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10564615-24b7-47cd-a1ec-33c44cb3d05f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.302649] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf13e53-edb7-4e6a-8c4b-48bc8f8fbe7b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.316276] env[68217]: DEBUG nova.compute.provider_tree [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1159.325597] env[68217]: DEBUG nova.network.neutron [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Successfully created port: 4873ab51-6a06-44e0-a653-3dfbaa42a0d1 
{{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1159.351450] env[68217]: DEBUG nova.compute.manager [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1159.402565] env[68217]: DEBUG oslo_vmware.api [None req-491d1a5a-9f69-48d5-9f4a-441ac6fa7771 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962110, 'name': ReconfigVM_Task, 'duration_secs': 0.395635} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.402831] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-491d1a5a-9f69-48d5-9f4a-441ac6fa7771 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Reconfigured VM instance instance-00000068 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1159.407617] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b03592d8-4108-4784-9457-695f96ce681d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.426166] env[68217]: DEBUG oslo_vmware.api [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962102, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.427480] env[68217]: DEBUG oslo_vmware.api [None req-491d1a5a-9f69-48d5-9f4a-441ac6fa7771 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1159.427480] env[68217]: value = "task-2962112" [ 1159.427480] env[68217]: _type = "Task" [ 1159.427480] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.435237] env[68217]: DEBUG oslo_vmware.api [None req-491d1a5a-9f69-48d5-9f4a-441ac6fa7771 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962112, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.668278] env[68217]: DEBUG oslo_vmware.api [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962111, 'name': ReconfigVM_Task, 'duration_secs': 0.445958} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.668550] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Reconfigured VM instance instance-00000071 to attach disk [datastore1] volume-60cd88df-fdc0-444d-9f98-a6b1725c8350/volume-60cd88df-fdc0-444d-9f98-a6b1725c8350.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1159.673201] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7f724559-442f-4872-968d-86e13472e0d2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.689273] env[68217]: DEBUG oslo_vmware.api [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1159.689273] env[68217]: value = "task-2962113" [ 1159.689273] env[68217]: _type = "Task" [ 1159.689273] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.697220] env[68217]: DEBUG oslo_vmware.api [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962113, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.762665] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1159.818922] env[68217]: DEBUG nova.scheduler.client.report [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1159.927358] env[68217]: DEBUG oslo_vmware.api [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962102, 'name': ReconfigVM_Task, 'duration_secs': 5.482138} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.927595] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1159.927802] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Reconfigured VM to detach interface {{(pid=68217) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 1159.937478] env[68217]: DEBUG oslo_vmware.api [None req-491d1a5a-9f69-48d5-9f4a-441ac6fa7771 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962112, 'name': ReconfigVM_Task, 'duration_secs': 0.124734} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.937744] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-491d1a5a-9f69-48d5-9f4a-441ac6fa7771 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594391', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'name': 'volume-1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'a4dcc7fb-83e4-4bb9-9c98-9569daee1435', 'attached_at': '', 'detached_at': '', 'volume_id': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d', 'serial': '1a23d7f7-7643-4a30-a9e1-fd8c99a6e28d'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1160.200502] env[68217]: DEBUG oslo_vmware.api [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962113, 'name': ReconfigVM_Task, 'duration_secs': 0.138546} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1160.201868] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594408', 'volume_id': '60cd88df-fdc0-444d-9f98-a6b1725c8350', 'name': 'volume-60cd88df-fdc0-444d-9f98-a6b1725c8350', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '759149be-178f-4238-b9c3-c316d060d6be', 'attached_at': '', 'detached_at': '', 'volume_id': '60cd88df-fdc0-444d-9f98-a6b1725c8350', 'serial': '60cd88df-fdc0-444d-9f98-a6b1725c8350'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1160.272268] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1970087-dcea-4d1a-81d7-d28d266ed2c4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.281106] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-783fdb00-593e-491b-a673-dc81f54352c5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.324026] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.812s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.350501] env[68217]: INFO nova.scheduler.client.report [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Deleted allocations for instance 6b4dff91-254e-43cc-85cf-7de6214dcafd [ 1160.361035] env[68217]: DEBUG nova.compute.manager [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1160.400237] env[68217]: DEBUG nova.virt.hardware [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1160.400599] env[68217]: DEBUG nova.virt.hardware [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1160.400663] env[68217]: DEBUG nova.virt.hardware [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1160.400829] env[68217]: DEBUG nova.virt.hardware [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1160.400980] env[68217]: DEBUG nova.virt.hardware [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1160.401290] env[68217]: DEBUG nova.virt.hardware [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1160.401466] env[68217]: DEBUG nova.virt.hardware [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1160.402160] env[68217]: DEBUG nova.virt.hardware [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1160.402160] env[68217]: DEBUG nova.virt.hardware [None 
req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1160.402160] env[68217]: DEBUG nova.virt.hardware [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1160.402355] env[68217]: DEBUG nova.virt.hardware [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1160.403226] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21abea3e-dadc-49d7-a830-1701b4bc98f9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.421923] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54181905-4283-4e97-b65f-9f991b45fb1f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.571878] env[68217]: DEBUG nova.objects.instance [None req-491d1a5a-9f69-48d5-9f4a-441ac6fa7771 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lazy-loading 'flavor' on Instance uuid a4dcc7fb-83e4-4bb9-9c98-9569daee1435 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1160.760662] env[68217]: DEBUG nova.compute.manager [req-211a29a2-7d1e-44a9-a604-e1223eb8c883 req-6f46682b-b853-440a-8b22-f825276180b0 service nova] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Received event network-vif-plugged-4873ab51-6a06-44e0-a653-3dfbaa42a0d1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1160.761034] env[68217]: DEBUG oslo_concurrency.lockutils [req-211a29a2-7d1e-44a9-a604-e1223eb8c883 req-6f46682b-b853-440a-8b22-f825276180b0 service nova] Acquiring lock "9844e40f-29ed-48b9-a48f-85fbe10ae2fb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1160.761783] env[68217]: DEBUG oslo_concurrency.lockutils [req-211a29a2-7d1e-44a9-a604-e1223eb8c883 req-6f46682b-b853-440a-8b22-f825276180b0 service nova] Lock "9844e40f-29ed-48b9-a48f-85fbe10ae2fb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1160.762106] env[68217]: DEBUG oslo_concurrency.lockutils [req-211a29a2-7d1e-44a9-a604-e1223eb8c883 req-6f46682b-b853-440a-8b22-f825276180b0 service nova] Lock "9844e40f-29ed-48b9-a48f-85fbe10ae2fb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.762398] env[68217]: DEBUG nova.compute.manager [req-211a29a2-7d1e-44a9-a604-e1223eb8c883 
req-6f46682b-b853-440a-8b22-f825276180b0 service nova] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] No waiting events found dispatching network-vif-plugged-4873ab51-6a06-44e0-a653-3dfbaa42a0d1 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1160.762668] env[68217]: WARNING nova.compute.manager [req-211a29a2-7d1e-44a9-a604-e1223eb8c883 req-6f46682b-b853-440a-8b22-f825276180b0 service nova] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Received unexpected event network-vif-plugged-4873ab51-6a06-44e0-a653-3dfbaa42a0d1 for instance with vm_state building and task_state spawning. [ 1160.858966] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51974bdb-cdf4-4d21-a5e6-9e671448d539 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "6b4dff91-254e-43cc-85cf-7de6214dcafd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.940s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1160.940560] env[68217]: DEBUG nova.network.neutron [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Successfully updated port: 4873ab51-6a06-44e0-a653-3dfbaa42a0d1 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1161.249721] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "6789dd7d-d042-4c29-a963-2b4b982d5b43" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.250780] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "6789dd7d-d042-4c29-a963-2b4b982d5b43" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.251502] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "6789dd7d-d042-4c29-a963-2b4b982d5b43-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1161.251796] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "6789dd7d-d042-4c29-a963-2b4b982d5b43-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1161.253696] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] 
Lock "6789dd7d-d042-4c29-a963-2b4b982d5b43-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.255937] env[68217]: INFO nova.compute.manager [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Terminating instance [ 1161.264654] env[68217]: DEBUG nova.objects.instance [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lazy-loading 'flavor' on Instance uuid 759149be-178f-4238-b9c3-c316d060d6be {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1161.274278] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.274446] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquired lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.274610] env[68217]: DEBUG nova.network.neutron [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1161.402599] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-954db1a5-a7c9-4d5a-9bc7-1e1dbc9ca12d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.439615] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd0c84c9-7635-441c-a8ad-a95fd4aff54a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.444039] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "refresh_cache-9844e40f-29ed-48b9-a48f-85fbe10ae2fb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1161.444039] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "refresh_cache-9844e40f-29ed-48b9-a48f-85fbe10ae2fb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1161.444039] env[68217]: DEBUG nova.network.neutron [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 
9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1161.448792] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating instance '400058d8-f9ca-41b9-a671-b04b0511d074' progress to 83 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1161.580883] env[68217]: DEBUG oslo_concurrency.lockutils [None req-491d1a5a-9f69-48d5-9f4a-441ac6fa7771 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.321s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.759906] env[68217]: DEBUG nova.compute.manager [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1161.760167] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1161.761121] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-323fefa6-7a20-429a-a340-b430c422595f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.771444] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1161.771734] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b7c69a38-47f7-4428-9571-c54574767053 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.773535] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ec5d154c-e91a-4868-8e23-5d011fbe3235 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "759149be-178f-4238-b9c3-c316d060d6be" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.293s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1161.780241] env[68217]: DEBUG oslo_vmware.api [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1161.780241] env[68217]: value = "task-2962115" [ 1161.780241] env[68217]: _type = "Task" [ 1161.780241] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.789366] env[68217]: DEBUG oslo_vmware.api [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962115, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.957714] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1161.957714] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5cf67ede-c703-4b1e-a3fe-9842a2d39737 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.965539] env[68217]: DEBUG oslo_vmware.api [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1161.965539] env[68217]: value = "task-2962116" [ 1161.965539] env[68217]: _type = "Task" [ 1161.965539] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.974430] env[68217]: DEBUG oslo_vmware.api [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962116, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.000358] env[68217]: DEBUG nova.network.neutron [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1162.289500] env[68217]: DEBUG oslo_vmware.api [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962115, 'name': PowerOffVM_Task, 'duration_secs': 0.254738} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.289810] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1162.289997] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1162.290259] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bdbf2a2c-b086-4209-b59a-69d2e9274a23 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.340198] env[68217]: INFO nova.network.neutron [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Port 96505674-7581-4b19-93ab-1b3fe17ed499 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. [ 1162.340616] env[68217]: DEBUG nova.network.neutron [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Updating instance_info_cache with network_info: [{"id": "3ee5d607-0533-4e05-9447-4840b4e48cdd", "address": "fa:16:3e:40:c3:41", "network": {"id": "246af4c9-69b4-4542-84b9-2afe67cf297a", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-1355653629-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.155", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ef7e30ed571740f3b3ea6b24fc9c6e20", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "78340140-126f-4ef8-a340-debaa64da3e5", "external-id": "nsx-vlan-transportzone-648", "segmentation_id": 648, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3ee5d607-05", "ovs_interfaceid": "3ee5d607-0533-4e05-9447-4840b4e48cdd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.343682] env[68217]: DEBUG nova.network.neutron [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Updating instance_info_cache with network_info: [{"id": "4873ab51-6a06-44e0-a653-3dfbaa42a0d1", "address": "fa:16:3e:32:b6:1d", "network": {"id": 
"a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4873ab51-6a", "ovs_interfaceid": "4873ab51-6a06-44e0-a653-3dfbaa42a0d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1162.356122] env[68217]: DEBUG oslo_concurrency.lockutils [None req-396f749c-90f4-40e1-bf76-5614828bc60d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "759149be-178f-4238-b9c3-c316d060d6be" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.356407] env[68217]: DEBUG oslo_concurrency.lockutils [None req-396f749c-90f4-40e1-bf76-5614828bc60d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "759149be-178f-4238-b9c3-c316d060d6be" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.357702] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1162.358301] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1162.358666] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Deleting the datastore file [datastore2] 6789dd7d-d042-4c29-a963-2b4b982d5b43 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1162.359331] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-75d76a27-d59c-45b7-8531-2c837e86d57e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.365937] env[68217]: DEBUG oslo_vmware.api [None 
req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1162.365937] env[68217]: value = "task-2962118" [ 1162.365937] env[68217]: _type = "Task" [ 1162.365937] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.374258] env[68217]: DEBUG oslo_vmware.api [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962118, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.475365] env[68217]: DEBUG oslo_vmware.api [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962116, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.788984] env[68217]: DEBUG nova.compute.manager [req-1623bf96-c05e-4f54-af74-0a0c365d169a req-fe8662e4-b757-4ac4-a692-2043ca539fdc service nova] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Received event network-changed-4873ab51-6a06-44e0-a653-3dfbaa42a0d1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1162.789246] env[68217]: DEBUG nova.compute.manager [req-1623bf96-c05e-4f54-af74-0a0c365d169a req-fe8662e4-b757-4ac4-a692-2043ca539fdc service nova] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Refreshing instance network info cache due to event network-changed-4873ab51-6a06-44e0-a653-3dfbaa42a0d1. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1162.789397] env[68217]: DEBUG oslo_concurrency.lockutils [req-1623bf96-c05e-4f54-af74-0a0c365d169a req-fe8662e4-b757-4ac4-a692-2043ca539fdc service nova] Acquiring lock "refresh_cache-9844e40f-29ed-48b9-a48f-85fbe10ae2fb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1162.814023] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.814281] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.814488] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1162.814667] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1162.814837] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1162.817080] env[68217]: INFO nova.compute.manager [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Terminating instance [ 1162.844203] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Releasing lock "refresh_cache-6789dd7d-d042-4c29-a963-2b4b982d5b43" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.846080] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 
tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "refresh_cache-9844e40f-29ed-48b9-a48f-85fbe10ae2fb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1162.846363] env[68217]: DEBUG nova.compute.manager [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Instance network_info: |[{"id": "4873ab51-6a06-44e0-a653-3dfbaa42a0d1", "address": "fa:16:3e:32:b6:1d", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4873ab51-6a", "ovs_interfaceid": "4873ab51-6a06-44e0-a653-3dfbaa42a0d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1162.846637] env[68217]: DEBUG oslo_concurrency.lockutils [req-1623bf96-c05e-4f54-af74-0a0c365d169a req-fe8662e4-b757-4ac4-a692-2043ca539fdc service nova] Acquired lock "refresh_cache-9844e40f-29ed-48b9-a48f-85fbe10ae2fb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1162.846807] env[68217]: DEBUG nova.network.neutron [req-1623bf96-c05e-4f54-af74-0a0c365d169a req-fe8662e4-b757-4ac4-a692-2043ca539fdc service nova] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Refreshing network info cache for port 4873ab51-6a06-44e0-a653-3dfbaa42a0d1 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1162.847907] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:b6:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f78b07ea-f425-4622-84f4-706a5d8820a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4873ab51-6a06-44e0-a653-3dfbaa42a0d1', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1162.857278] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1162.858831] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1162.859082] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7cc14d3d-f6c5-48d1-96a5-217a3da1a072 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.873888] env[68217]: INFO nova.compute.manager [None req-396f749c-90f4-40e1-bf76-5614828bc60d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Detaching volume 60cd88df-fdc0-444d-9f98-a6b1725c8350 [ 1162.884711] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1162.884711] env[68217]: value = "task-2962119" [ 1162.884711] env[68217]: _type = "Task" [ 1162.884711] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.887694] env[68217]: DEBUG oslo_vmware.api [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962118, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.337551} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.890683] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1162.890888] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1162.891113] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1162.891282] env[68217]: INFO nova.compute.manager [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1162.891536] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1162.891746] env[68217]: DEBUG nova.compute.manager [-] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1162.891840] env[68217]: DEBUG nova.network.neutron [-] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1162.899112] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962119, 'name': CreateVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.911899] env[68217]: INFO nova.virt.block_device [None req-396f749c-90f4-40e1-bf76-5614828bc60d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Attempting to driver detach volume 60cd88df-fdc0-444d-9f98-a6b1725c8350 from mountpoint /dev/sdb [ 1162.912182] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-396f749c-90f4-40e1-bf76-5614828bc60d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Volume detach. Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1162.912346] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-396f749c-90f4-40e1-bf76-5614828bc60d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594408', 'volume_id': '60cd88df-fdc0-444d-9f98-a6b1725c8350', 'name': 'volume-60cd88df-fdc0-444d-9f98-a6b1725c8350', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '759149be-178f-4238-b9c3-c316d060d6be', 'attached_at': '', 'detached_at': '', 'volume_id': '60cd88df-fdc0-444d-9f98-a6b1725c8350', 'serial': '60cd88df-fdc0-444d-9f98-a6b1725c8350'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1162.913218] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c824fba2-6477-4f09-a9cd-6a9d9bc46e42 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.939101] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e7ba6d8-d726-46e1-8ec3-3445e5b2bdb1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.946707] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-294c64cb-3102-4435-82c8-45094c2c598d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.971728] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3cb3a2-e2df-41ad-bc1b-1e4490b3205b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.992532] env[68217]: DEBUG oslo_vmware.api [None 
req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962116, 'name': PowerOnVM_Task, 'duration_secs': 0.627876} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.992809] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-396f749c-90f4-40e1-bf76-5614828bc60d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] The volume has not been displaced from its original location: [datastore1] volume-60cd88df-fdc0-444d-9f98-a6b1725c8350/volume-60cd88df-fdc0-444d-9f98-a6b1725c8350.vmdk. No consolidation needed. {{(pid=68217) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1162.998019] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-396f749c-90f4-40e1-bf76-5614828bc60d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Reconfiguring VM instance instance-00000071 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1162.998357] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1162.998538] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f5f5fcce-0f05-44cd-b711-24d9558a8dc8 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating instance '400058d8-f9ca-41b9-a671-b04b0511d074' progress to 100 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1163.002171] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-235e8628-8c8c-4734-ad20-99a647c83d47 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.020632] env[68217]: DEBUG oslo_vmware.api [None req-396f749c-90f4-40e1-bf76-5614828bc60d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1163.020632] env[68217]: value = "task-2962120" [ 1163.020632] env[68217]: _type = "Task" [ 1163.020632] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.031112] env[68217]: DEBUG oslo_vmware.api [None req-396f749c-90f4-40e1-bf76-5614828bc60d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962120, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.057966] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._sync_power_states {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1163.320819] env[68217]: DEBUG nova.compute.manager [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1163.321060] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1163.322318] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b44255-365b-4578-b3fd-6db46ea5e27d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.330762] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1163.331084] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1c23d897-f2d3-423b-b81e-6e6e4944729e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.337404] env[68217]: DEBUG oslo_vmware.api [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1163.337404] env[68217]: value = "task-2962121" [ 1163.337404] env[68217]: _type = "Task" [ 1163.337404] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.346292] env[68217]: DEBUG oslo_vmware.api [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962121, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.359285] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7f2a4669-0616-4dda-8387-bf24dbedaf06 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "interface-6789dd7d-d042-4c29-a963-2b4b982d5b43-96505674-7581-4b19-93ab-1b3fe17ed499" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.519s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.396936] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962119, 'name': CreateVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.529931] env[68217]: DEBUG oslo_vmware.api [None req-396f749c-90f4-40e1-bf76-5614828bc60d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962120, 'name': ReconfigVM_Task, 'duration_secs': 0.284541} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.532420] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-396f749c-90f4-40e1-bf76-5614828bc60d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Reconfigured VM instance instance-00000071 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1163.537240] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c23b09d-85af-4edd-aeca-bb225f1e05c5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.556362] env[68217]: DEBUG oslo_vmware.api [None req-396f749c-90f4-40e1-bf76-5614828bc60d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1163.556362] env[68217]: value = "task-2962122" [ 1163.556362] env[68217]: _type = "Task" [ 1163.556362] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.564196] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Getting list of instances from cluster (obj){ [ 1163.564196] env[68217]: value = "domain-c8" [ 1163.564196] env[68217]: _type = "ClusterComputeResource" [ 1163.564196] env[68217]: } {{(pid=68217) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1163.566149] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8206c15e-6990-41e8-8df8-eb1ffedd242f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.575533] env[68217]: DEBUG oslo_vmware.api [None req-396f749c-90f4-40e1-bf76-5614828bc60d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962122, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.587975] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Got total of 10 instances {{(pid=68217) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1163.588295] env[68217]: WARNING nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] While synchronizing instance power states, found 12 instances in the database and 10 instances on the hypervisor. 
[ 1163.588394] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Triggering sync for uuid 7056fb29-2a2f-4275-a411-4d5f3fcb421f {{(pid=68217) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1163.588547] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Triggering sync for uuid a86015ea-fa6b-4cf8-9d79-273ffa02ec23 {{(pid=68217) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1163.588709] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Triggering sync for uuid e8ed78ff-94dd-42d3-8a4d-8e58dc788e55 {{(pid=68217) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1163.588862] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Triggering sync for uuid b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1 {{(pid=68217) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1163.589032] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Triggering sync for uuid d28bcf16-b081-4dc8-a975-2acaed222e15 {{(pid=68217) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1163.589175] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Triggering sync for uuid a4dcc7fb-83e4-4bb9-9c98-9569daee1435 {{(pid=68217) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1163.589325] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Triggering sync for uuid 815d1801-fa07-4466-850d-b1a36d630d46 {{(pid=68217) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1163.589471] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Triggering sync for uuid 89136574-575c-47da-928c-bd7a5dbb3a98 {{(pid=68217) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1163.589619] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Triggering sync for uuid 400058d8-f9ca-41b9-a671-b04b0511d074 {{(pid=68217) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1163.589766] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Triggering sync for uuid 6789dd7d-d042-4c29-a963-2b4b982d5b43 {{(pid=68217) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1163.589910] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Triggering sync for uuid 759149be-178f-4238-b9c3-c316d060d6be {{(pid=68217) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1163.590056] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Triggering sync for uuid 9844e40f-29ed-48b9-a48f-85fbe10ae2fb {{(pid=68217) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10944}} [ 1163.590428] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "7056fb29-2a2f-4275-a411-4d5f3fcb421f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.590639] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None 
None] Lock "7056fb29-2a2f-4275-a411-4d5f3fcb421f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.590908] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "a86015ea-fa6b-4cf8-9d79-273ffa02ec23" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.591100] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "a86015ea-fa6b-4cf8-9d79-273ffa02ec23" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.591331] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.591646] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.591754] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.591924] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.592158] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "d28bcf16-b081-4dc8-a975-2acaed222e15" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.592332] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "d28bcf16-b081-4dc8-a975-2acaed222e15" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.592548] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock 
"a4dcc7fb-83e4-4bb9-9c98-9569daee1435" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.592746] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "815d1801-fa07-4466-850d-b1a36d630d46" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.592965] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "815d1801-fa07-4466-850d-b1a36d630d46" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.593213] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "89136574-575c-47da-928c-bd7a5dbb3a98" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.593392] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "89136574-575c-47da-928c-bd7a5dbb3a98" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.593610] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "400058d8-f9ca-41b9-a671-b04b0511d074" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.593783] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "400058d8-f9ca-41b9-a671-b04b0511d074" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1163.593945] env[68217]: INFO nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] During sync_power_state the instance has a pending task (resize_finish). Skip. 
[ 1163.594116] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "400058d8-f9ca-41b9-a671-b04b0511d074" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1163.594301] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "6789dd7d-d042-4c29-a963-2b4b982d5b43" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.594520] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "759149be-178f-4238-b9c3-c316d060d6be" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.594849] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "9844e40f-29ed-48b9-a48f-85fbe10ae2fb" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1163.595046] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c50a96b6-e805-4dcc-98ba-4d4272959bbd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.597903] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4795c9-a937-4886-93e7-4d6abce7f7a7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.601224] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44b5de5c-0ba1-4be2-8d67-390a50d61945 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.603994] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87c1b084-0420-4fa2-8e16-de780bc97d7e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.608806] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a4a38cf-d789-4d60-a4a1-0ae0898be1f3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.611592] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b46d19d4-57e5-4c90-93b8-72d79dea17cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.615161] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b539b69e-f444-4039-b6de-0f440572cb0c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.641628] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cbd1cd16-1b22-45ef-a763-5ba4210ce27e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.847019] env[68217]: DEBUG oslo_vmware.api [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962121, 'name': PowerOffVM_Task, 'duration_secs': 0.289818} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.847290] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1163.847464] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1163.847722] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-aef03466-232f-40aa-9ef0-935015333d8e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.898504] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962119, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.008276] env[68217]: DEBUG nova.network.neutron [req-1623bf96-c05e-4f54-af74-0a0c365d169a req-fe8662e4-b757-4ac4-a692-2043ca539fdc service nova] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Updated VIF entry in instance network info cache for port 4873ab51-6a06-44e0-a653-3dfbaa42a0d1. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1164.008673] env[68217]: DEBUG nova.network.neutron [req-1623bf96-c05e-4f54-af74-0a0c365d169a req-fe8662e4-b757-4ac4-a692-2043ca539fdc service nova] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Updating instance_info_cache with network_info: [{"id": "4873ab51-6a06-44e0-a653-3dfbaa42a0d1", "address": "fa:16:3e:32:b6:1d", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4873ab51-6a", "ovs_interfaceid": "4873ab51-6a06-44e0-a653-3dfbaa42a0d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.068938] env[68217]: DEBUG oslo_vmware.api [None req-396f749c-90f4-40e1-bf76-5614828bc60d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962122, 'name': ReconfigVM_Task, 'duration_secs': 0.147573} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.069132] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-396f749c-90f4-40e1-bf76-5614828bc60d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594408', 'volume_id': '60cd88df-fdc0-444d-9f98-a6b1725c8350', 'name': 'volume-60cd88df-fdc0-444d-9f98-a6b1725c8350', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '759149be-178f-4238-b9c3-c316d060d6be', 'attached_at': '', 'detached_at': '', 'volume_id': '60cd88df-fdc0-444d-9f98-a6b1725c8350', 'serial': '60cd88df-fdc0-444d-9f98-a6b1725c8350'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1164.133036] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.541s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.134519] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.542s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.136950] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "a86015ea-fa6b-4cf8-9d79-273ffa02ec23" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.546s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.156912] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "d28bcf16-b081-4dc8-a975-2acaed222e15" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.564s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.158357] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "815d1801-fa07-4466-850d-b1a36d630d46" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.565s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.158666] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "89136574-575c-47da-928c-bd7a5dbb3a98" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.565s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.182636] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "7056fb29-2a2f-4275-a411-4d5f3fcb421f" "released" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.592s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1164.398541] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962119, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.511456] env[68217]: DEBUG oslo_concurrency.lockutils [req-1623bf96-c05e-4f54-af74-0a0c365d169a req-fe8662e4-b757-4ac4-a692-2043ca539fdc service nova] Releasing lock "refresh_cache-9844e40f-29ed-48b9-a48f-85fbe10ae2fb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1164.585465] env[68217]: DEBUG nova.network.neutron [-] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1164.619043] env[68217]: DEBUG nova.objects.instance [None req-396f749c-90f4-40e1-bf76-5614828bc60d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lazy-loading 'flavor' on Instance uuid 759149be-178f-4238-b9c3-c316d060d6be {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1164.813534] env[68217]: DEBUG nova.compute.manager [req-454120a5-5d14-46b9-a680-052280ddb2cd req-80bd9ef4-5932-45cf-8926-f7e1e5c000c5 service nova] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Received event network-vif-deleted-3ee5d607-0533-4e05-9447-4840b4e48cdd {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1164.899330] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962119, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.094094] env[68217]: INFO nova.compute.manager [-] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Took 2.20 seconds to deallocate network for instance. 
[ 1165.191572] env[68217]: DEBUG nova.network.neutron [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Port 189406da-f39b-4370-b43d-945cbb45afb2 binding to destination host cpu-1 is already ACTIVE {{(pid=68217) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1165.191854] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.192082] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1165.192281] env[68217]: DEBUG nova.network.neutron [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1165.282577] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1165.282914] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.283170] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1165.283363] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.283532] env[68217]: DEBUG oslo_concurrency.lockutils [None 
req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.285594] env[68217]: INFO nova.compute.manager [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Terminating instance [ 1165.399937] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962119, 'name': CreateVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.598192] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1165.598546] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1165.598609] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleting the datastore file [datastore1] a4dcc7fb-83e4-4bb9-9c98-9569daee1435 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1165.598967] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-859430a3-725a-4680-90ea-fbc921440edf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.603943] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1165.604250] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.604546] env[68217]: DEBUG nova.objects.instance [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lazy-loading 'resources' on Instance uuid 6789dd7d-d042-4c29-a963-2b4b982d5b43 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1165.606783] env[68217]: DEBUG 
oslo_vmware.api [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1165.606783] env[68217]: value = "task-2962124" [ 1165.606783] env[68217]: _type = "Task" [ 1165.606783] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.615433] env[68217]: DEBUG oslo_vmware.api [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962124, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.625518] env[68217]: DEBUG oslo_concurrency.lockutils [None req-396f749c-90f4-40e1-bf76-5614828bc60d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "759149be-178f-4238-b9c3-c316d060d6be" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.269s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1165.626558] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "759149be-178f-4238-b9c3-c316d060d6be" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 2.032s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1165.627571] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c11b4d7-9c83-4b9c-9458-63aae63de5b9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.789151] env[68217]: DEBUG nova.compute.manager [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1165.789381] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1165.790484] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0cee46-a854-4c9f-a3e1-ec546cf1bc3d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.800867] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1165.801123] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-40d3b374-388f-45b3-ab30-a17d5cfac0ef {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.807151] env[68217]: DEBUG oslo_vmware.api [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1165.807151] env[68217]: value = "task-2962125" [ 1165.807151] env[68217]: _type = "Task" [ 1165.807151] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.815184] env[68217]: DEBUG oslo_vmware.api [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962125, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.902230] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962119, 'name': CreateVM_Task, 'duration_secs': 2.71029} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.902414] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1165.903181] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1165.903310] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1165.903648] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1165.903887] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b1603ead-8ce9-4497-97c0-88916fa36e1e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1165.910709] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1165.910709] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f68747-a46c-d839-50b9-b7749e9736df" [ 1165.910709] env[68217]: _type = "Task" [ 1165.910709] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1165.918769] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f68747-a46c-d839-50b9-b7749e9736df, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1165.994748] env[68217]: DEBUG nova.network.neutron [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating instance_info_cache with network_info: [{"id": "189406da-f39b-4370-b43d-945cbb45afb2", "address": "fa:16:3e:fa:ff:e7", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap189406da-f3", "ovs_interfaceid": "189406da-f39b-4370-b43d-945cbb45afb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1166.119761] env[68217]: DEBUG oslo_vmware.api [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962124, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.331422} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.120315] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1166.120315] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1166.120531] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1166.120592] env[68217]: INFO nova.compute.manager [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Took 2.80 seconds to destroy the instance on the hypervisor. 
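The DeleteDatastoreFile_Task / wait_for_task sequence recorded above follows the usual oslo.vmware pattern: a vSphere task method is invoked through the API session and the session then polls the returned task until it reaches a terminal state. A minimal sketch of that pattern is shown below; the endpoint, credentials, datastore path and datacenter moref are placeholders, not the values from this log.

    from oslo_vmware import api, vim_util

    # Placeholder endpoint and credentials; real values are site-specific.
    session = api.VMwareAPISession(
        'vcenter.example.com', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    content = session.vim.service_content
    # Kick off the datastore file deletion; vCenter returns a Task moref.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task', content.fileManager,
        name='[datastore1] <instance-uuid>',
        datacenter=vim_util.get_moref('datacenter-2', 'Datacenter'))
    # Blocks, polling the task every task_poll_interval seconds, and raises
    # if the task finishes in an error state (this is the _poll_task /
    # wait_for_task loop visible in the log entries above).
    session.wait_for_task(task)
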
[ 1166.120798] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1166.121426] env[68217]: DEBUG nova.compute.manager [-] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1166.121426] env[68217]: DEBUG nova.network.neutron [-] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1166.136855] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "759149be-178f-4238-b9c3-c316d060d6be" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.510s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.270029] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e52e8c68-a419-4eaa-b550-73539d07e17a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.278085] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b80b99b-9f16-4712-a40a-7b48ae7ac7f8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.307396] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63e8df4-f656-4765-ad74-7d18b034ce7f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.319465] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c266714a-a04a-4c7b-9ce3-e24234d1b461 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.322998] env[68217]: DEBUG oslo_vmware.api [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962125, 'name': PowerOffVM_Task, 'duration_secs': 0.271786} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.323275] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1166.323439] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1166.323944] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4f380e69-6bba-4a34-92ae-d197df889e85 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.334516] env[68217]: DEBUG nova.compute.provider_tree [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1166.381889] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1166.382094] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1166.382285] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Deleting the datastore file [datastore2] b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1166.382533] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-da4efd61-e0f1-4d29-a9cf-14afbf02f131 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.388881] env[68217]: DEBUG oslo_vmware.api [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1166.388881] env[68217]: value = "task-2962127" [ 1166.388881] env[68217]: _type = "Task" [ 1166.388881] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.397440] env[68217]: DEBUG oslo_vmware.api [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962127, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.419889] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f68747-a46c-d839-50b9-b7749e9736df, 'name': SearchDatastore_Task, 'duration_secs': 0.01068} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.420208] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1166.420463] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1166.420704] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1166.420865] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1166.421076] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1166.421339] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0689a95-3a51-423d-9447-aa2f58614c8c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.429258] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1166.429432] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1166.430184] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fe205ee5-1b76-4bee-a98b-6441ecf3676d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.435070] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1166.435070] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]521b596d-be43-5cf8-79c7-87d871b1c27e" [ 1166.435070] env[68217]: _type = "Task" [ 1166.435070] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.442520] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521b596d-be43-5cf8-79c7-87d871b1c27e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1166.497353] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1166.637944] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "759149be-178f-4238-b9c3-c316d060d6be" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.638278] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "759149be-178f-4238-b9c3-c316d060d6be" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.638396] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "759149be-178f-4238-b9c3-c316d060d6be-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1166.638610] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 
tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "759149be-178f-4238-b9c3-c316d060d6be-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1166.638801] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "759149be-178f-4238-b9c3-c316d060d6be-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1166.640732] env[68217]: INFO nova.compute.manager [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Terminating instance [ 1166.840148] env[68217]: DEBUG nova.scheduler.client.report [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1166.844586] env[68217]: DEBUG nova.compute.manager [req-e49c963d-aa42-400d-9953-02f101f4d576 req-ab2fc199-a641-406e-ad5d-e094f1dc7044 service nova] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Received event network-vif-deleted-018f74db-1dcd-49e4-bd11-2ab20c34e986 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1166.844954] env[68217]: INFO nova.compute.manager [req-e49c963d-aa42-400d-9953-02f101f4d576 req-ab2fc199-a641-406e-ad5d-e094f1dc7044 service nova] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Neutron deleted interface 018f74db-1dcd-49e4-bd11-2ab20c34e986; detaching it from the instance and deleting it from the info cache [ 1166.845349] env[68217]: DEBUG nova.network.neutron [req-e49c963d-aa42-400d-9953-02f101f4d576 req-ab2fc199-a641-406e-ad5d-e094f1dc7044 service nova] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1166.899850] env[68217]: DEBUG oslo_vmware.api [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962127, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173672} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.900330] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1166.903228] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1166.903228] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1166.903228] env[68217]: INFO nova.compute.manager [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1166.903228] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1166.903228] env[68217]: DEBUG nova.compute.manager [-] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1166.903228] env[68217]: DEBUG nova.network.neutron [-] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1166.946069] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521b596d-be43-5cf8-79c7-87d871b1c27e, 'name': SearchDatastore_Task, 'duration_secs': 0.008123} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1166.946483] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d14961b-7c83-4b93-9e21-84a0c103228b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1166.951882] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1166.951882] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]523a648d-02bd-c083-3643-2512f362b31b" [ 1166.951882] env[68217]: _type = "Task" [ 1166.951882] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1166.958915] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523a648d-02bd-c083-3643-2512f362b31b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.000937] env[68217]: DEBUG nova.compute.manager [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68217) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1167.090140] env[68217]: DEBUG nova.network.neutron [-] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.146763] env[68217]: DEBUG nova.compute.manager [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1167.150020] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1167.150020] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97d07336-a4ee-4de8-b901-f7878d5f9ca2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.157333] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1167.157333] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff3898ec-815b-4633-a6b4-3e0c056dc3f3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.163220] env[68217]: DEBUG oslo_vmware.api [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1167.163220] env[68217]: value = "task-2962128" [ 1167.163220] env[68217]: _type = "Task" [ 1167.163220] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.177521] env[68217]: DEBUG oslo_vmware.api [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962128, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.347639] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.743s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.351335] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1167.353022] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b24f0e69-07c0-4387-95a9-85b601aa48d9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.362173] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e9cd35-c5c0-4296-ab77-f1d19129e2c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.374751] env[68217]: INFO nova.scheduler.client.report [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Deleted allocations for instance 6789dd7d-d042-4c29-a963-2b4b982d5b43 [ 1167.397615] env[68217]: DEBUG nova.compute.manager [req-e49c963d-aa42-400d-9953-02f101f4d576 req-ab2fc199-a641-406e-ad5d-e094f1dc7044 service nova] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Detach interface failed, port_id=018f74db-1dcd-49e4-bd11-2ab20c34e986, reason: Instance a4dcc7fb-83e4-4bb9-9c98-9569daee1435 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1167.462501] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]523a648d-02bd-c083-3643-2512f362b31b, 'name': SearchDatastore_Task, 'duration_secs': 0.009744} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.463426] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1167.463426] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 9844e40f-29ed-48b9-a48f-85fbe10ae2fb/9844e40f-29ed-48b9-a48f-85fbe10ae2fb.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1167.463426] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-51669c2b-932d-453e-b704-6e6716a2eb55 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.470473] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1167.470473] env[68217]: value = "task-2962129" [ 1167.470473] env[68217]: _type = "Task" [ 1167.470473] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.477943] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962129, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.592492] env[68217]: INFO nova.compute.manager [-] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Took 1.47 seconds to deallocate network for instance. [ 1167.673827] env[68217]: DEBUG oslo_vmware.api [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962128, 'name': PowerOffVM_Task, 'duration_secs': 0.201222} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.674196] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1167.674402] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1167.674712] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e2d56cca-f80b-413e-8d50-db5842ae6e94 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.734017] env[68217]: DEBUG nova.network.neutron [-] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1167.738676] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1167.738676] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1167.738891] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Deleting the datastore file [datastore2] 759149be-178f-4238-b9c3-c316d060d6be {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1167.739142] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-398ccb0f-4cc6-499f-8ee2-ec9f1cd4e2d3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.746022] env[68217]: DEBUG oslo_vmware.api [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1167.746022] env[68217]: value = "task-2962131" [ 1167.746022] env[68217]: _type = "Task" [ 1167.746022] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.756599] env[68217]: DEBUG oslo_vmware.api [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962131, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.856764] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.856996] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.857124] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.857284] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68217) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1167.858310] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b1f0084-8ad6-471c-a24c-c84b24eb1e61 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.868432] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32643cdd-c2b2-401d-8cc4-92c58aa76f03 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.885656] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c507445e-f260-44b2-bf9c-9381c9410893 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.888707] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c4b8919d-d1e6-4607-819e-76033e5f7778 tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "6789dd7d-d042-4c29-a963-2b4b982d5b43" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.638s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.889736] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "6789dd7d-d042-4c29-a963-2b4b982d5b43" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 4.295s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.890637] env[68217]: INFO nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] During sync_power_state the instance has a pending task (deleting). Skip. 
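The repeated "Acquiring lock … acquired … released" lines around "compute_resources" come from oslo.concurrency's lockutils wrapper, which logs wait and hold times around a named semaphore. A minimal sketch of that locking pattern follows; the decorated function is illustrative only (not code from this log), and it assumes the conventional "nova-" lock-name prefix used by Nova's helpers.

    from oslo_concurrency import lockutils

    # Nova-style helper: all lock names get a service prefix.
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def update_usage(resource_tracker, instance):
        # Runs while holding the "compute_resources" semaphore; the wrapper
        # emits the DEBUG "acquired ... waited" / "released ... held" lines.
        ...

    # The same lock can also be taken explicitly as a context manager:
    with lockutils.lock('compute_resources'):
        pass
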
[ 1167.890637] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "6789dd7d-d042-4c29-a963-2b4b982d5b43" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1167.895024] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3a1a369-95cd-490d-bff7-2d09d8549213 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.925134] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179166MB free_disk=153GB free_vcpus=48 pci_devices=None {{(pid=68217) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1167.925367] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1167.925630] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1167.982364] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962129, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.504665} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.982614] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 9844e40f-29ed-48b9-a48f-85fbe10ae2fb/9844e40f-29ed-48b9-a48f-85fbe10ae2fb.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1167.982827] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1167.983082] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-88dda88b-dba9-4298-b784-0a4e92973d9b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.989857] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1167.989857] env[68217]: value = "task-2962132" [ 1167.989857] env[68217]: _type = "Task" [ 1167.989857] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.997724] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962132, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.101924] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.102427] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.144583] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "89136574-575c-47da-928c-bd7a5dbb3a98" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.144836] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "89136574-575c-47da-928c-bd7a5dbb3a98" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.145055] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "89136574-575c-47da-928c-bd7a5dbb3a98-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.145246] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "89136574-575c-47da-928c-bd7a5dbb3a98-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1168.145415] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "89136574-575c-47da-928c-bd7a5dbb3a98-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1168.147561] env[68217]: INFO nova.compute.manager [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Terminating instance [ 
1168.236730] env[68217]: INFO nova.compute.manager [-] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Took 1.34 seconds to deallocate network for instance. [ 1168.255909] env[68217]: DEBUG oslo_vmware.api [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962131, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.251678} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.256182] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1168.256362] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1168.256538] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1168.256707] env[68217]: INFO nova.compute.manager [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1168.256936] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1168.257137] env[68217]: DEBUG nova.compute.manager [-] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1168.257230] env[68217]: DEBUG nova.network.neutron [-] [instance: 759149be-178f-4238-b9c3-c316d060d6be] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1168.498812] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962132, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062746} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.499112] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1168.499884] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fb5ed68-3687-410d-a26a-789a117b2d02 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.521331] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Reconfiguring VM instance instance-00000075 to attach disk [datastore1] 9844e40f-29ed-48b9-a48f-85fbe10ae2fb/9844e40f-29ed-48b9-a48f-85fbe10ae2fb.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1168.521600] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-18cf9eca-3943-4dc3-9b39-9991390facd2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.540738] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1168.540738] env[68217]: value = "task-2962133" [ 1168.540738] env[68217]: _type = "Task" [ 1168.540738] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.548203] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962133, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.651677] env[68217]: DEBUG nova.compute.manager [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1168.651918] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1168.654465] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f8fd1cc-ca59-4fe7-a181-4fcd8925a88b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.662380] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1168.662380] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fea4496b-5e18-41c0-9029-46f3e5895ea6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1168.667620] env[68217]: DEBUG oslo_vmware.api [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1168.667620] env[68217]: value = "task-2962134" [ 1168.667620] env[68217]: _type = "Task" [ 1168.667620] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1168.676082] env[68217]: DEBUG oslo_vmware.api [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962134, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.743769] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1168.906113] env[68217]: DEBUG nova.compute.manager [req-42f4364a-3e49-49e9-bf9a-ed040dcd522e req-7bf004af-524a-4bd4-a5f3-ff0661185f10 service nova] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Received event network-vif-deleted-893a1ea4-f2e7-4d28-a23d-1ffee27ca9f0 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1168.906113] env[68217]: DEBUG nova.compute.manager [req-42f4364a-3e49-49e9-bf9a-ed040dcd522e req-7bf004af-524a-4bd4-a5f3-ff0661185f10 service nova] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Received event network-vif-deleted-ccbc8261-8d58-4e71-9a59-ac46dac31267 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1168.906249] env[68217]: INFO nova.compute.manager [req-42f4364a-3e49-49e9-bf9a-ed040dcd522e req-7bf004af-524a-4bd4-a5f3-ff0661185f10 service nova] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Neutron deleted interface ccbc8261-8d58-4e71-9a59-ac46dac31267; detaching it from the instance and deleting it from the info cache [ 1168.906413] env[68217]: DEBUG nova.network.neutron [req-42f4364a-3e49-49e9-bf9a-ed040dcd522e req-7bf004af-524a-4bd4-a5f3-ff0661185f10 service nova] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1168.941149] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Applying migration context for instance 400058d8-f9ca-41b9-a671-b04b0511d074 as it has an incoming, in-progress migration e3c97425-57b9-4c9a-b7c7-d35f321e6398. Migration status is reverting {{(pid=68217) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1168.942459] env[68217]: INFO nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating resource usage from migration e3c97425-57b9-4c9a-b7c7-d35f321e6398 [ 1168.961743] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 7056fb29-2a2f-4275-a411-4d5f3fcb421f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.961891] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance a86015ea-fa6b-4cf8-9d79-273ffa02ec23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.962026] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance e8ed78ff-94dd-42d3-8a4d-8e58dc788e55 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.962148] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.962263] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance d28bcf16-b081-4dc8-a975-2acaed222e15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.962397] env[68217]: WARNING nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance a4dcc7fb-83e4-4bb9-9c98-9569daee1435 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1168.962514] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 89136574-575c-47da-928c-bd7a5dbb3a98 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.962629] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 759149be-178f-4238-b9c3-c316d060d6be actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.962761] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 815d1801-fa07-4466-850d-b1a36d630d46 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.962878] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Migration e3c97425-57b9-4c9a-b7c7-d35f321e6398 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1168.962989] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 400058d8-f9ca-41b9-a671-b04b0511d074 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.963116] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 9844e40f-29ed-48b9-a48f-85fbe10ae2fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1168.963302] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Total usable vcpus: 48, total allocated vcpus: 11 {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1168.963429] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2688MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=11 pci_stats=[] {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1169.052143] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962133, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.092010] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7109142-5865-476c-8699-498d457fc949 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.099667] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f39658-588d-40f5-b8ba-69b706d30379 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.130899] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049dd8c0-50a6-4a04-bc62-6299ac976add {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.137723] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f843f5bd-7f96-4149-940e-2d12f3ae0245 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.150707] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1169.164188] env[68217]: DEBUG nova.network.neutron [-] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1169.179020] env[68217]: DEBUG oslo_vmware.api [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962134, 'name': PowerOffVM_Task, 'duration_secs': 0.179006} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.179020] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1169.179020] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1169.179020] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-efa1fce5-4064-4190-8dca-f9582bf005d0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.241405] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1169.241614] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1169.241795] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Deleting the datastore file [datastore1] 89136574-575c-47da-928c-bd7a5dbb3a98 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1169.242067] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e39774f2-f925-4757-a7fb-abf58f220730 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.248772] env[68217]: DEBUG oslo_vmware.api [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for the task: (returnval){ [ 1169.248772] env[68217]: value = "task-2962136" [ 1169.248772] env[68217]: _type = "Task" [ 1169.248772] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.256047] env[68217]: DEBUG oslo_vmware.api [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962136, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.409159] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e02e5327-4bd4-4975-8a31-bb38c7ad85a7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.418999] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-694245b6-ef59-4374-a1f6-031f57592424 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.450882] env[68217]: DEBUG nova.compute.manager [req-42f4364a-3e49-49e9-bf9a-ed040dcd522e req-7bf004af-524a-4bd4-a5f3-ff0661185f10 service nova] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Detach interface failed, port_id=ccbc8261-8d58-4e71-9a59-ac46dac31267, reason: Instance 759149be-178f-4238-b9c3-c316d060d6be could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1169.552104] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962133, 'name': ReconfigVM_Task, 'duration_secs': 0.937399} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.552392] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Reconfigured VM instance instance-00000075 to attach disk [datastore1] 9844e40f-29ed-48b9-a48f-85fbe10ae2fb/9844e40f-29ed-48b9-a48f-85fbe10ae2fb.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1169.553087] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1019cfb1-e9ea-4796-9acc-e39b32fb2f9e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.560102] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1169.560102] env[68217]: value = "task-2962137" [ 1169.560102] env[68217]: _type = "Task" [ 1169.560102] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1169.570219] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962137, 'name': Rename_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1169.666300] env[68217]: INFO nova.compute.manager [-] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Took 1.41 seconds to deallocate network for instance. 
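The resource tracker audit above (the entries around 1168.94 to 1168.96) lists per-consumer allocations of {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1} (256 MB for the instance carrying the in-progress migration) and then reports a final view of used_ram=2688MB, used_disk=11GB, used_vcpus=11. A minimal re-derivation of those numbers is sketched below; the consumer list and the 512 MB reservation are copied from the log, while treating used_ram as reservation plus the sum of allocations is an assumption for illustration.

    # Illustrative re-derivation of the 'Final resource view' figures above.
    # Instance a4dcc7fb-... has allocations but is not actively managed here,
    # so it is not counted.
    allocations = (
        [{'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}] * 8   # eight active instances
        + [{'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}]      # migration e3c97425-...
        + [{'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}]      # instance 400058d8-... (reverting resize)
        + [{'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}]      # instance 9844e40f-... (spawning)
    )
    reserved_memory_mb = 512                                  # 'reserved' in the MEMORY_MB inventory
    used_vcpus = sum(a['VCPU'] for a in allocations)          # 11
    used_ram_mb = reserved_memory_mb + sum(a['MEMORY_MB'] for a in allocations)  # 2688
    used_disk_gb = sum(a['DISK_GB'] for a in allocations)     # 11
    print(used_vcpus, used_ram_mb, used_disk_gb)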
[ 1169.672028] env[68217]: ERROR nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [req-645d41b8-c62a-4bad-8146-6e153de38eb1] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-645d41b8-c62a-4bad-8146-6e153de38eb1"}]} [ 1169.688189] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1169.704578] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1169.704772] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1169.718568] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1169.736972] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1169.760976] env[68217]: DEBUG oslo_vmware.api [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Task: {'id': task-2962136, 'name': DeleteDatastoreFile_Task, 
'duration_secs': 0.246894} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1169.761260] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1169.761438] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1169.761672] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1169.761866] env[68217]: INFO nova.compute.manager [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1169.762126] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1169.762305] env[68217]: DEBUG nova.compute.manager [-] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1169.762436] env[68217]: DEBUG nova.network.neutron [-] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1169.890617] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72dea0fb-1a6c-4966-af16-7686f57b7161 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.898293] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d9ad0e-3864-4867-ac6e-bab875b382d0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.927846] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb606a18-a3f9-4f07-b6ae-c35ec1a55ee1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.935823] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-998d41d2-3554-46e8-bc89-74012d0d15a8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1169.949919] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1170.070123] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962137, 'name': Rename_Task, 'duration_secs': 0.164578} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.070391] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1170.070630] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-47407852-3882-4161-80ea-d67ab714b922 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.077726] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1170.077726] env[68217]: value = "task-2962138" [ 1170.077726] env[68217]: _type = "Task" [ 1170.077726] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1170.085231] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962138, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1170.176173] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1170.482167] env[68217]: DEBUG nova.network.neutron [-] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1170.484375] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 158 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1170.484618] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 158 to 159 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1170.485178] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1170.587797] env[68217]: DEBUG oslo_vmware.api [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962138, 'name': PowerOnVM_Task, 'duration_secs': 0.490766} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1170.588100] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1170.588310] env[68217]: INFO nova.compute.manager [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Took 10.23 seconds to spawn the instance on the hypervisor. [ 1170.588489] env[68217]: DEBUG nova.compute.manager [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1170.589245] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-643a7ae6-bc21-4a54-8d6b-66176aa141df {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1170.943239] env[68217]: DEBUG nova.compute.manager [req-656db319-acaf-4140-9263-621c96e4b666 req-a7ceae1d-4ccc-4eaf-a5d9-a859c050a356 service nova] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Received event network-vif-deleted-513d21ef-f0b3-47f7-96ae-f01c23ac3ef1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1170.989751] env[68217]: INFO nova.compute.manager [-] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Took 1.23 seconds to deallocate network for instance. 
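The ERROR at 1169.672 is placement's optimistic-concurrency check firing: the inventory PUT carried a stale resource provider generation, placement answered 409 placement.concurrent_update, the report client refreshed its cached inventories, and the retry at 1170.484 succeeded, moving the provider generation from 158 to 159. Below is a minimal sketch of that read-then-retry pattern against a placement-style API; the endpoint URL, microversion header, and helper name are assumptions for illustration, not taken from the log.

    import requests

    PLACEMENT = "http://placement.example/placement"          # hypothetical endpoint
    HEADERS = {"OpenStack-API-Version": "placement 1.26"}      # assumed microversion

    def set_inventory_with_retry(provider_uuid, inventories, max_attempts=3):
        # PUT the inventory together with the generation last seen; if another
        # writer bumped the generation first, placement returns 409
        # placement.concurrent_update and we re-read before trying again.
        url = f"{PLACEMENT}/resource_providers/{provider_uuid}"
        for _ in range(max_attempts):
            generation = requests.get(url, headers=HEADERS).json()["generation"]
            resp = requests.put(
                f"{url}/inventories",
                json={"resource_provider_generation": generation,
                      "inventories": inventories},
                headers=HEADERS,
            )
            if resp.status_code != 409:
                resp.raise_for_status()
                return resp.json()["resource_provider_generation"]  # bumped generation
        raise RuntimeError("gave up after repeated generation conflicts")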
[ 1170.990441] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68217) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1170.990646] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.065s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.992321] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.891s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1170.992886] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1170.994439] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 2.892s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.017827] env[68217]: INFO nova.scheduler.client.report [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleted allocations for instance a4dcc7fb-83e4-4bb9-9c98-9569daee1435 [ 1171.105418] env[68217]: INFO nova.compute.manager [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Took 15.04 seconds to build instance. 
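The Acquiring lock "compute_resources" / acquired ... waited 2.891s / "released" ... held 3.065s triplets above come from oslo.concurrency serializing the resource tracker: claims, usage updates and the periodic audit all run under one named lock, so waiters queue while _update_available_resource holds it. A minimal sketch of that pattern with oslo.concurrency follows; the function names are placeholders, not Nova's.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage(instance):
        # Only one green thread at a time mutates tracked usage; callers that
        # arrive while the lock is held log 'Acquiring lock ...' immediately and
        # 'acquired ... waited N.NNNs' once the holder releases it.
        ...

    def drop_move_claim(instance):
        # Equivalent context-manager form.
        with lockutils.lock('compute_resources'):
            ...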
[ 1171.499596] env[68217]: DEBUG nova.objects.instance [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lazy-loading 'migration_context' on Instance uuid 400058d8-f9ca-41b9-a671-b04b0511d074 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1171.501412] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1171.525047] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92a8fca6-cb92-4c80-be9b-c60ae17411dc tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.711s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.526079] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 7.933s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.526295] env[68217]: INFO nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] During sync_power_state the instance has a pending task (deleting). Skip. [ 1171.526472] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "a4dcc7fb-83e4-4bb9-9c98-9569daee1435" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.607610] env[68217]: DEBUG oslo_concurrency.lockutils [None req-cf234b11-ca3e-4dd6-8744-88b8004a63ca tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "9844e40f-29ed-48b9-a48f-85fbe10ae2fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.550s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1171.607926] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "9844e40f-29ed-48b9-a48f-85fbe10ae2fb" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 8.013s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1171.608136] env[68217]: INFO nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] During sync_power_state the instance has a pending task (spawning). Skip. 
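The two "During sync_power_state the instance has a pending task (deleting/spawning). Skip." entries above show the periodic power-state sync backing off whenever another operation owns the instance: it takes the per-instance lock, sees a task in flight, and returns without touching the record. A rough sketch of that guard is below, assuming a task_state attribute as the in-flight marker; this approximates the behaviour visible in the log and is not Nova's actual code.

    from oslo_log import log as logging

    LOG = logging.getLogger(__name__)

    def query_driver_power_state_and_sync(db_instance, driver_power_state):
        # While another operation is in flight (task_state set, e.g. 'deleting'
        # or 'spawning'), skip the instance rather than racing with it.
        if db_instance.task_state is not None:
            LOG.info("During sync_power_state the instance has a pending task "
                     "(%s). Skip.", db_instance.task_state)
            return
        if db_instance.power_state != driver_power_state:
            # Out of sync: record what the hypervisor reports and let the normal
            # corrective handling react to the discrepancy.
            db_instance.power_state = driver_power_state
            db_instance.save()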
[ 1171.608317] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "9844e40f-29ed-48b9-a48f-85fbe10ae2fb" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1172.169509] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63c334fc-20dd-4fb4-b657-fb4b8785e80e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.182962] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dafdf2ed-5a3d-4cd6-8d1a-8dffb19d29e4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.212891] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-689b799f-0c32-4361-bc59-8d52713e6d1f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.221215] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5924248d-7530-448d-a86d-92c58466bbf9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1172.236258] env[68217]: DEBUG nova.compute.provider_tree [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1172.739285] env[68217]: DEBUG nova.scheduler.client.report [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1172.972722] env[68217]: DEBUG nova.compute.manager [req-45a6071b-f87e-4cae-a30f-7a2b0b9c2c82 req-cb163f4a-e7a1-4b8f-8756-6e9d09b7793c service nova] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Received event network-changed-4873ab51-6a06-44e0-a653-3dfbaa42a0d1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1172.972886] env[68217]: DEBUG nova.compute.manager [req-45a6071b-f87e-4cae-a30f-7a2b0b9c2c82 req-cb163f4a-e7a1-4b8f-8756-6e9d09b7793c service nova] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Refreshing instance network info cache due to event network-changed-4873ab51-6a06-44e0-a653-3dfbaa42a0d1. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1172.973113] env[68217]: DEBUG oslo_concurrency.lockutils [req-45a6071b-f87e-4cae-a30f-7a2b0b9c2c82 req-cb163f4a-e7a1-4b8f-8756-6e9d09b7793c service nova] Acquiring lock "refresh_cache-9844e40f-29ed-48b9-a48f-85fbe10ae2fb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1172.973260] env[68217]: DEBUG oslo_concurrency.lockutils [req-45a6071b-f87e-4cae-a30f-7a2b0b9c2c82 req-cb163f4a-e7a1-4b8f-8756-6e9d09b7793c service nova] Acquired lock "refresh_cache-9844e40f-29ed-48b9-a48f-85fbe10ae2fb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1172.973419] env[68217]: DEBUG nova.network.neutron [req-45a6071b-f87e-4cae-a30f-7a2b0b9c2c82 req-cb163f4a-e7a1-4b8f-8756-6e9d09b7793c service nova] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Refreshing network info cache for port 4873ab51-6a06-44e0-a653-3dfbaa42a0d1 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1172.992791] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.992978] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.993156] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.993306] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.993461] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.993606] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.993755] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1172.993882] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68217) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1173.145359] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "01c32252-f6e0-4cb0-966e-622872d49199" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1173.145625] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "01c32252-f6e0-4cb0-966e-622872d49199" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.648728] env[68217]: DEBUG nova.compute.manager [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1173.691216] env[68217]: DEBUG nova.network.neutron [req-45a6071b-f87e-4cae-a30f-7a2b0b9c2c82 req-cb163f4a-e7a1-4b8f-8756-6e9d09b7793c service nova] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Updated VIF entry in instance network info cache for port 4873ab51-6a06-44e0-a653-3dfbaa42a0d1. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1173.691572] env[68217]: DEBUG nova.network.neutron [req-45a6071b-f87e-4cae-a30f-7a2b0b9c2c82 req-cb163f4a-e7a1-4b8f-8756-6e9d09b7793c service nova] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Updating instance_info_cache with network_info: [{"id": "4873ab51-6a06-44e0-a653-3dfbaa42a0d1", "address": "fa:16:3e:32:b6:1d", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.197", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4873ab51-6a", "ovs_interfaceid": "4873ab51-6a06-44e0-a653-3dfbaa42a0d1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.749981] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 
tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.755s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1173.756695] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.013s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1173.757069] env[68217]: DEBUG nova.objects.instance [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lazy-loading 'resources' on Instance uuid b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1174.171027] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1174.193810] env[68217]: DEBUG oslo_concurrency.lockutils [req-45a6071b-f87e-4cae-a30f-7a2b0b9c2c82 req-cb163f4a-e7a1-4b8f-8756-6e9d09b7793c service nova] Releasing lock "refresh_cache-9844e40f-29ed-48b9-a48f-85fbe10ae2fb" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1174.395028] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beffc045-93fb-49fe-a014-b7bb425856b0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.401673] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02bb20fc-ce5c-40e4-a84c-3fe4e0f360ef {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.433217] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-579e4202-39e8-42f4-acc3-2bf4c1658104 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.439932] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f4a468c-5010-4fae-a8b7-7f11ba93115a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.452417] env[68217]: DEBUG nova.compute.provider_tree [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1174.956055] env[68217]: DEBUG nova.scheduler.client.report [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Inventory 
has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1175.292127] env[68217]: INFO nova.compute.manager [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Swapping old allocation on dict_keys(['42aedcce-ee61-45e1-bf10-c06056d1f548']) held by migration e3c97425-57b9-4c9a-b7c7-d35f321e6398 for instance [ 1175.314553] env[68217]: DEBUG nova.scheduler.client.report [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Overwriting current allocation {'allocations': {'42aedcce-ee61-45e1-bf10-c06056d1f548': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 160}}, 'project_id': '90ad2b0a8a0743ca80a0685bf56e0446', 'user_id': '34286c10b8b242fb83eb4f1493b9477b', 'consumer_generation': 1} on consumer 400058d8-f9ca-41b9-a671-b04b0511d074 {{(pid=68217) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1175.386130] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1175.386322] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1175.386495] env[68217]: DEBUG nova.network.neutron [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1175.460049] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.703s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1175.463462] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.287s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1175.463462] env[68217]: DEBUG nova.objects.instance [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lazy-loading 'resources' on Instance uuid 759149be-178f-4238-b9c3-c316d060d6be {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1175.478544] env[68217]: INFO nova.scheduler.client.report [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Deleted allocations for instance b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1 [ 1175.991224] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a75473d7-2277-4299-887a-ef8d7e7cfb40 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.708s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.100303] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca90054d-4926-4632-88fe-f866accdfde8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.108181] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc9987a0-bcee-4b9d-83f5-8f6a12600717 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.140055] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6c5e13d-7fbe-4633-ab0c-31aad0aec35a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.147192] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4b8559d-b23a-446a-b41d-4f8e749ada72 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.160168] env[68217]: DEBUG nova.compute.provider_tree [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1176.218907] env[68217]: DEBUG nova.network.neutron [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating instance_info_cache with network_info: [{"id": "189406da-f39b-4370-b43d-945cbb45afb2", "address": "fa:16:3e:fa:ff:e7", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, 
"meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap189406da-f3", "ovs_interfaceid": "189406da-f39b-4370-b43d-945cbb45afb2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1176.491229] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.491571] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.491904] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "e8ed78ff-94dd-42d3-8a4d-8e58dc788e55-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1176.492234] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "e8ed78ff-94dd-42d3-8a4d-8e58dc788e55-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1176.492533] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "e8ed78ff-94dd-42d3-8a4d-8e58dc788e55-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1176.495288] env[68217]: INFO nova.compute.manager [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Terminating instance [ 1176.663329] env[68217]: DEBUG nova.scheduler.client.report [None req-6fff1b43-6052-42bb-9a35-03287cc49461 
tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1176.721524] env[68217]: DEBUG oslo_concurrency.lockutils [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "refresh_cache-400058d8-f9ca-41b9-a671-b04b0511d074" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1176.722491] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e35fce2f-9fb5-4c5f-868e-cbb595461f0f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.731273] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a493e571-b7a2-4d9d-a2ab-8a397e051841 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1176.998733] env[68217]: DEBUG nova.compute.manager [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Start destroying the instance on the hypervisor. 
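The inventory payload repeated in the records above (VCPU / MEMORY_MB / DISK_GB for provider 42aedcce-ee61-45e1-bf10-c06056d1f548) is what the report client compares against Placement on each update. The schedulable capacity implied by those fields is (total - reserved) * allocation_ratio, with max_unit bounding any single allocation. A small sketch of that arithmetic using the exact numbers from the log; illustrative only, not Nova or Placement source code:

    # Illustrative only: reproduces the capacity arithmetic implied by the
    # inventory payload logged above (not Nova/Placement source code).
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 154},
    }

    def effective_capacity(inv):
        # Schedulable capacity per resource class; max_unit caps a single request.
        return {rc: {'capacity': (f['total'] - f['reserved']) * f['allocation_ratio'],
                     'largest_single_request': f['max_unit']}
                for rc, f in inv.items()}

    print(effective_capacity(inventory))
    # VCPU -> 192.0, MEMORY_MB -> 196078.0, DISK_GB -> 400.0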
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1176.999131] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1176.999824] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89509032-b2ef-420c-a567-73fd6839760f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.007713] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1177.007969] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b68d3785-6bff-419f-8e47-367e88a153ed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.015583] env[68217]: DEBUG oslo_vmware.api [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1177.015583] env[68217]: value = "task-2962139" [ 1177.015583] env[68217]: _type = "Task" [ 1177.015583] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.023632] env[68217]: DEBUG oslo_vmware.api [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962139, 'name': PowerOffVM_Task} progress is 0%. 
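The PowerOffVM_Task records show the oslo.vmware wait_for_task pattern: a vCenter task object is created, then its state is polled until it finishes (progress 0% here, "completed successfully" a few records later). A self-contained sketch of that polling loop; get_task_info is a hypothetical callable standing in for the real session API:

    import time

    # Hypothetical poller; the real oslo.vmware wait_for_task does the
    # equivalent with its own session object and backoff handling.
    def wait_for_task(get_task_info, poll_interval=0.5, timeout=300):
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            info = get_task_info()          # e.g. {'state': 'running', 'progress': 40}
            if info['state'] == 'success':
                return info.get('result')
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            time.sleep(poll_interval)       # task still queued/running; poll again
        raise TimeoutError('task did not complete in %ss' % timeout)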
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.168847] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.706s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1177.171976] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.670s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1177.171976] env[68217]: DEBUG nova.objects.instance [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lazy-loading 'resources' on Instance uuid 89136574-575c-47da-928c-bd7a5dbb3a98 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1177.190624] env[68217]: INFO nova.scheduler.client.report [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Deleted allocations for instance 759149be-178f-4238-b9c3-c316d060d6be [ 1177.525542] env[68217]: DEBUG oslo_vmware.api [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962139, 'name': PowerOffVM_Task, 'duration_secs': 0.211229} completed successfully. 
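"Deleted allocations for instance b88912fd-..." corresponds to removing that consumer's allocations from the Placement service once the instance has been terminated. A minimal sketch of the REST call involved, using python-requests; the endpoint path is the documented Placement one, while the URL, token handling and microversion header here are illustrative assumptions:

    import requests

    def delete_allocations(placement_url, token, consumer_uuid):
        # DELETE /allocations/{consumer_uuid} removes every allocation held by
        # the consumer (the instance UUID) across all resource providers.
        resp = requests.delete(
            f"{placement_url}/allocations/{consumer_uuid}",
            headers={"X-Auth-Token": token,
                     "OpenStack-API-Version": "placement 1.28"},
            timeout=30,
        )
        resp.raise_for_status()   # 204 No Content on success

    # delete_allocations("http://placement.example:8778", token,
    #                    "b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1")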
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1177.525754] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1177.525922] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1177.526178] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-69462fb6-c9a1-4bbc-955a-a47e00d2d3e5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.589072] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1177.589302] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1177.589497] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Deleting the datastore file [datastore2] e8ed78ff-94dd-42d3-8a4d-8e58dc788e55 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1177.589774] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-037ce2b5-2802-4e8d-869a-cc208241d006 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.597033] env[68217]: DEBUG oslo_vmware.api [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for the task: (returnval){ [ 1177.597033] env[68217]: value = "task-2962141" [ 1177.597033] env[68217]: _type = "Task" [ 1177.597033] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.605605] env[68217]: DEBUG oslo_vmware.api [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962141, 'name': DeleteDatastoreFile_Task} progress is 0%. 
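The records for e8ed78ff-... trace the driver's destroy ordering: power off the VM, unregister it from vCenter (which leaves the files in place), then delete the instance directory from the datastore. A compressed sketch of that ordering; invoke() is a hypothetical helper standing in for the real vim session calls:

    def destroy_instance(invoke, vm_ref, datastore, datacenter, instance_uuid):
        """Illustrative ordering only: power off, unregister, delete files."""
        invoke("PowerOffVM_Task", vm_ref)     # tolerate 'already powered off'
        invoke("UnregisterVM", vm_ref)        # removes the VM from inventory,
                                              # leaves its files on the datastore
        # Finally remove the instance directory, e.g. "[datastore2] <uuid>"
        invoke("DeleteDatastoreFile_Task",
               name=f"[{datastore}] {instance_uuid}",
               datacenter=datacenter)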
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1177.698172] env[68217]: DEBUG oslo_concurrency.lockutils [None req-6fff1b43-6052-42bb-9a35-03287cc49461 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "759149be-178f-4238-b9c3-c316d060d6be" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.060s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1177.791508] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64dbd4b1-95ab-418f-89c6-a4615f5bc063 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.798931] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ef1c0c-ce49-4180-9ab7-6f1065224285 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.829382] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1177.829833] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-14105107-ad3d-4842-8fc2-b22a6a290466 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.831920] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd380c0-0dae-4a2b-9f96-c79dde869c31 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.839887] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f850045-aa4b-4bde-ace9-658866355cc5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1177.843677] env[68217]: DEBUG oslo_vmware.api [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1177.843677] env[68217]: value = "task-2962142" [ 1177.843677] env[68217]: _type = "Task" [ 1177.843677] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1177.856221] env[68217]: DEBUG nova.compute.provider_tree [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1177.862205] env[68217]: DEBUG oslo_vmware.api [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962142, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.106768] env[68217]: DEBUG oslo_vmware.api [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Task: {'id': task-2962141, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.147178} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.107134] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1178.107134] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1178.107308] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1178.107481] env[68217]: INFO nova.compute.manager [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1178.107749] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1178.107937] env[68217]: DEBUG nova.compute.manager [-] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1178.108042] env[68217]: DEBUG nova.network.neutron [-] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1178.354435] env[68217]: DEBUG oslo_vmware.api [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962142, 'name': PowerOffVM_Task, 'duration_secs': 0.250636} completed successfully. 
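The loopingcall record ("Waiting for function ..._deallocate_network_with_retries to return") shows network teardown being wrapped in a bounded retry, since Neutron calls can fail transiently while ports are still being deleted. A generic retry sketch of the same shape in plain Python; the neutron call in the usage comment is hypothetical:

    import time

    def call_with_retries(fn, attempts=3, delay=2.0, retry_on=(Exception,)):
        # Generic stand-in for the "deallocate network with retries" pattern:
        # re-invoke a flaky cleanup call a bounded number of times before giving up.
        for attempt in range(1, attempts + 1):
            try:
                return fn()
            except retry_on:
                if attempt == attempts:
                    raise
                time.sleep(delay * attempt)   # simple linear backoff

    # call_with_retries(lambda: neutron.deallocate_for_instance(ctxt, instance))  # hypothetical call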
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.354711] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1178.355384] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1178.355711] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1178.359273] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1178.359273] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1178.359273] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1178.359273] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1178.359273] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1178.359273] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 
tempest-ServerActionsTestOtherB-1476378927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1178.359273] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1178.359273] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1178.359273] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1178.363470] env[68217]: DEBUG nova.scheduler.client.report [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1178.367391] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc217841-5a7d-457c-b0a5-c4360991eeac {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.384702] env[68217]: DEBUG oslo_vmware.api [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1178.384702] env[68217]: value = "task-2962143" [ 1178.384702] env[68217]: _type = "Task" [ 1178.384702] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.395762] env[68217]: DEBUG oslo_vmware.api [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962143, 'name': ReconfigVM_Task} progress is 6%. 
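The nova.virt.hardware records above walk the CPU topology selection for the one-vCPU m1.nano flavor: with flavor and image preferences of 0:0:0 and limits of 65536, the only factorisation of 1 vCPU is 1 socket x 1 core x 1 thread, which is why exactly one possible topology is reported. A toy enumeration of that step (simplified; not the full nova.virt.hardware algorithm):

    from collections import namedtuple

    Topology = namedtuple("Topology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Every (sockets, cores, threads) factorisation of vcpus within the limits.
        topologies = []
        for s in range(1, min(vcpus, max_sockets) + 1):
            for c in range(1, min(vcpus, max_cores) + 1):
                for t in range(1, min(vcpus, max_threads) + 1):
                    if s * c * t == vcpus:
                        topologies.append(Topology(s, c, t))
        return topologies

    print(possible_topologies(1))   # [Topology(sockets=1, cores=1, threads=1)]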
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1178.411188] env[68217]: DEBUG nova.compute.manager [req-814edc4e-a417-4a59-8fe1-03c9a5683f71 req-0c097cf5-08b0-4eed-945d-e94a548659de service nova] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Received event network-vif-deleted-46e4edb9-72c4-4a7d-af91-4b553d829391 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1178.411188] env[68217]: INFO nova.compute.manager [req-814edc4e-a417-4a59-8fe1-03c9a5683f71 req-0c097cf5-08b0-4eed-945d-e94a548659de service nova] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Neutron deleted interface 46e4edb9-72c4-4a7d-af91-4b553d829391; detaching it from the instance and deleting it from the info cache [ 1178.411188] env[68217]: DEBUG nova.network.neutron [req-814edc4e-a417-4a59-8fe1-03c9a5683f71 req-0c097cf5-08b0-4eed-945d-e94a548659de service nova] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.878941] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.708s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1178.881387] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.710s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1178.882892] env[68217]: INFO nova.compute.claims [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1178.898138] env[68217]: DEBUG oslo_vmware.api [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962143, 'name': ReconfigVM_Task, 'duration_secs': 0.209067} completed successfully. 
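The network-vif-deleted event above makes the compute manager drop port 46e4edb9-... from the instance's cached network_info, which is why the cache is updated to [] right after. A small sketch of that filtering over the same list-of-dicts shape seen in the "Updating instance_info_cache with network_info" records:

    def drop_deleted_vif(network_info, deleted_port_id):
        # network_info is a list of VIF dicts like the ones logged above,
        # each carrying the Neutron port UUID under 'id'.
        return [vif for vif in network_info if vif.get("id") != deleted_port_id]

    cache = [{"id": "46e4edb9-72c4-4a7d-af91-4b553d829391", "address": "fa:16:3e:..."}]
    print(drop_deleted_vif(cache, "46e4edb9-72c4-4a7d-af91-4b553d829391"))   # []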
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.898446] env[68217]: DEBUG nova.network.neutron [-] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1178.900147] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e1cd803-b861-4430-9b87-257b5b434c73 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.906729] env[68217]: INFO nova.scheduler.client.report [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Deleted allocations for instance 89136574-575c-47da-928c-bd7a5dbb3a98 [ 1178.931793] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1178.932090] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1178.932251] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1178.932430] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1178.932573] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1178.932718] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1178.932985] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 
tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1178.933080] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1178.933229] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1178.933388] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1178.933587] env[68217]: DEBUG nova.virt.hardware [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1178.937843] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ae023581-a9f9-4a2e-ba00-a223abe9f2c6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.939782] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0110d433-89ef-4851-bd71-e92f0799e8a6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.947510] env[68217]: DEBUG oslo_vmware.api [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1178.947510] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524b57b2-1005-c349-2c4e-6d2122dcbd0e" [ 1178.947510] env[68217]: _type = "Task" [ 1178.947510] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1178.950303] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f2cbe2-20ee-44a9-bb0b-c6496209eea9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.975346] env[68217]: DEBUG oslo_vmware.api [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524b57b2-1005-c349-2c4e-6d2122dcbd0e, 'name': SearchDatastore_Task, 'duration_secs': 0.016768} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1178.988497] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Reconfiguring VM instance instance-0000006f to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1178.988885] env[68217]: DEBUG nova.compute.manager [req-814edc4e-a417-4a59-8fe1-03c9a5683f71 req-0c097cf5-08b0-4eed-945d-e94a548659de service nova] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Detach interface failed, port_id=46e4edb9-72c4-4a7d-af91-4b553d829391, reason: Instance e8ed78ff-94dd-42d3-8a4d-8e58dc788e55 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1178.989823] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-02e2a6a2-ecec-4279-baef-e2d3a0d1d30e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.007133] env[68217]: DEBUG oslo_vmware.api [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1179.007133] env[68217]: value = "task-2962144" [ 1179.007133] env[68217]: _type = "Task" [ 1179.007133] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.014925] env[68217]: DEBUG oslo_vmware.api [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962144, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.404191] env[68217]: INFO nova.compute.manager [-] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Took 1.30 seconds to deallocate network for instance. [ 1179.445541] env[68217]: DEBUG oslo_concurrency.lockutils [None req-65be46f3-478d-44c1-b965-9ae3129ce91c tempest-AttachInterfacesTestJSON-2077443799 tempest-AttachInterfacesTestJSON-2077443799-project-member] Lock "89136574-575c-47da-928c-bd7a5dbb3a98" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.301s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1179.516461] env[68217]: DEBUG oslo_vmware.api [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962144, 'name': ReconfigVM_Task, 'duration_secs': 0.479694} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1179.516783] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Reconfigured VM instance instance-0000006f to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1179.517633] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1a6287e-10de-4a3f-ac47-c9eae3c463fb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.541568] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Reconfiguring VM instance instance-0000006f to attach disk [datastore1] 400058d8-f9ca-41b9-a671-b04b0511d074/400058d8-f9ca-41b9-a671-b04b0511d074.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1179.541948] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86d7b344-a0c8-4feb-a8f8-01059f21f7c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.559810] env[68217]: DEBUG oslo_vmware.api [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1179.559810] env[68217]: value = "task-2962145" [ 1179.559810] env[68217]: _type = "Task" [ 1179.559810] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1179.569509] env[68217]: DEBUG oslo_vmware.api [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962145, 'name': ReconfigVM_Task} progress is 6%. 
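The back-to-back ReconfigVM_Task calls on instance-0000006f first detach disk 2000 and then re-attach the preserved 400058d8-....vmdk thin-provisioned, restoring the pre-migration disk layout during the revert. A schematic of that two-step reconfigure; build_spec() and reconfigure() are hypothetical stand-ins for the real vim device-change specs and session calls:

    def revert_disk_layout(reconfigure, build_spec, vm_ref, old_vmdk_path):
        """Illustrative two-step reconfigure (hypothetical helpers)."""
        # Step 1: ReconfigVM_Task with a 'remove' device change for disk key 2000.
        reconfigure(vm_ref, build_spec(operation="remove", device_key=2000))
        # Step 2: ReconfigVM_Task with an 'add' device change pointing at the
        # original backing file, e.g. "[datastore1] <uuid>/<uuid>.vmdk".
        reconfigure(vm_ref, build_spec(operation="add",
                                       backing_file=old_vmdk_path,
                                       thin_provisioned=True))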
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1179.910499] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1179.996534] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf06174-20fb-465e-a7ce-9d1ecf750955 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.004221] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fce88e4-80a1-4500-992f-c2906bffa71d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.034101] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-450411e3-afff-4fbd-b783-be6d89a3623d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.041389] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bfccbad-223a-4ba3-b369-aea0a3c1e39f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.054286] env[68217]: DEBUG nova.compute.provider_tree [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1180.068930] env[68217]: DEBUG oslo_vmware.api [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962145, 'name': ReconfigVM_Task, 'duration_secs': 0.268195} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.069199] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Reconfigured VM instance instance-0000006f to attach disk [datastore1] 400058d8-f9ca-41b9-a671-b04b0511d074/400058d8-f9ca-41b9-a671-b04b0511d074.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1180.069956] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f7c5b6a-1682-47e4-a081-0578cb271458 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.090906] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e007cfa9-8751-4133-b923-4e6edd709ccb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.113113] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2bf40ad-b730-4474-b9a3-5b2461869f84 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.132914] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c7eef90-417f-4a48-9308-b3ae18a8b7a2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.140401] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1180.140401] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-92742fef-966e-433f-ac4a-37790eecd1bb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1180.147988] env[68217]: DEBUG oslo_vmware.api [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1180.147988] env[68217]: value = "task-2962146" [ 1180.147988] env[68217]: _type = "Task" [ 1180.147988] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1180.157412] env[68217]: DEBUG oslo_vmware.api [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962146, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1180.410035] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1180.410358] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1180.557554] env[68217]: DEBUG nova.scheduler.client.report [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1180.657352] env[68217]: DEBUG oslo_vmware.api [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962146, 'name': PowerOnVM_Task, 'duration_secs': 0.421797} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1180.657651] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1180.913379] env[68217]: DEBUG nova.compute.manager [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Starting instance... 
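"Acquiring lock "5a6f6c64-..." by ..._locked_do_build_and_run_instance" shows the per-instance serialization: the compute manager takes a lock named after the instance UUID so concurrent operations on the same instance cannot interleave. A minimal sketch of that pattern with oslo.concurrency; the wrapper function is illustrative, only the lockutils.lock() usage is the library API:

    from oslo_concurrency import lockutils

    def locked_build(instance_uuid, do_build):
        # Serialize all work on one instance behind a lock named by its UUID,
        # mirroring the "Acquiring lock <uuid> ..." records above
        # (sketch; not the actual ComputeManager code).
        with lockutils.lock(instance_uuid):
            return do_build()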
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1181.063010] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.182s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1181.063576] env[68217]: DEBUG nova.compute.manager [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1181.066096] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.156s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1181.066317] env[68217]: DEBUG nova.objects.instance [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lazy-loading 'resources' on Instance uuid e8ed78ff-94dd-42d3-8a4d-8e58dc788e55 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1181.435194] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1181.569972] env[68217]: DEBUG nova.compute.utils [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1181.571416] env[68217]: DEBUG nova.compute.manager [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Allocating IP information in the background. 
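"Allocating IP information in the background" means Neutron port allocation is started asynchronously while block device mappings are prepared, and the result is only awaited when spawning actually needs the network info. Nova does this with eventlet greenthreads; the concurrent.futures sketch below is an analogy for the control flow, not the real mechanism:

    from concurrent.futures import ThreadPoolExecutor

    def build_instance(allocate_network, build_block_devices, spawn):
        with ThreadPoolExecutor(max_workers=1) as pool:
            # Kick off Neutron port allocation without blocking ...
            nw_future = pool.submit(allocate_network)
            # ... keep preparing other resources in the meantime ...
            bdms = build_block_devices()
            # ... and only block on the network info when spawning needs it.
            spawn(network_info=nw_future.result(), block_device_info=bdms)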
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1181.571587] env[68217]: DEBUG nova.network.neutron [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1181.625546] env[68217]: DEBUG nova.policy [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0939a9bd52d142818e49fbf0c576e4a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd093c295105c44cca8bd67bd514429d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1181.711250] env[68217]: INFO nova.compute.manager [None req-ca8b60dc-d840-4971-bf00-e24de32fd3f6 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating instance to original state: 'active' [ 1181.718030] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44f5f2c6-e484-4a6b-b95b-a62289b0b36e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.726497] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ea8362-11f5-4533-a4c6-325aa7647b6e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.761183] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4341b954-28a1-4f6d-a583-5ce0b9c597c9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.769387] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be02b3d1-35e8-4fb8-b1af-84073e03319e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1181.783579] env[68217]: DEBUG nova.compute.provider_tree [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1181.981232] env[68217]: DEBUG nova.network.neutron [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Successfully created port: b00791ed-450f-419a-9745-945fdb5a3713 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1182.074424] env[68217]: DEBUG nova.compute.manager [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 
tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1182.286886] env[68217]: DEBUG nova.scheduler.client.report [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1182.792757] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.726s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1182.795370] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.360s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1182.796964] env[68217]: INFO nova.compute.claims [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1182.818929] env[68217]: INFO nova.scheduler.client.report [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Deleted allocations for instance e8ed78ff-94dd-42d3-8a4d-8e58dc788e55 [ 1183.082967] env[68217]: DEBUG nova.compute.manager [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1183.110518] env[68217]: DEBUG nova.virt.hardware [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1183.114017] env[68217]: DEBUG nova.virt.hardware [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1183.114017] env[68217]: DEBUG nova.virt.hardware [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1183.114017] env[68217]: DEBUG nova.virt.hardware [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1183.114017] env[68217]: DEBUG nova.virt.hardware [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1183.114017] env[68217]: DEBUG nova.virt.hardware [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1183.115106] env[68217]: DEBUG nova.virt.hardware [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1183.115317] env[68217]: DEBUG nova.virt.hardware [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1183.115546] env[68217]: DEBUG 
nova.virt.hardware [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1183.115665] env[68217]: DEBUG nova.virt.hardware [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1183.115837] env[68217]: DEBUG nova.virt.hardware [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1183.116716] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c4b877b-ca35-45ec-92db-cb7fea7e6ed4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.127540] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5fb22bc-766e-4e20-9b56-a070dedc0e69 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.328798] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d366ee1-d6b8-43de-9312-d33fbb5360d0 tempest-ServerRescueNegativeTestJSON-187294080 tempest-ServerRescueNegativeTestJSON-187294080-project-member] Lock "e8ed78ff-94dd-42d3-8a4d-8e58dc788e55" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.837s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.485884] env[68217]: DEBUG nova.compute.manager [req-19ff3059-9783-46cf-84e1-6751eebd6c87 req-83d52477-2ff0-4b6f-a53d-4d654330d42e service nova] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Received event network-vif-plugged-b00791ed-450f-419a-9745-945fdb5a3713 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1183.486120] env[68217]: DEBUG oslo_concurrency.lockutils [req-19ff3059-9783-46cf-84e1-6751eebd6c87 req-83d52477-2ff0-4b6f-a53d-4d654330d42e service nova] Acquiring lock "01c32252-f6e0-4cb0-966e-622872d49199-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.486330] env[68217]: DEBUG oslo_concurrency.lockutils [req-19ff3059-9783-46cf-84e1-6751eebd6c87 req-83d52477-2ff0-4b6f-a53d-4d654330d42e service nova] Lock "01c32252-f6e0-4cb0-966e-622872d49199-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.486495] env[68217]: DEBUG oslo_concurrency.lockutils [req-19ff3059-9783-46cf-84e1-6751eebd6c87 req-83d52477-2ff0-4b6f-a53d-4d654330d42e service nova] Lock "01c32252-f6e0-4cb0-966e-622872d49199-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.486667] env[68217]: DEBUG nova.compute.manager [req-19ff3059-9783-46cf-84e1-6751eebd6c87 req-83d52477-2ff0-4b6f-a53d-4d654330d42e service nova] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] No waiting events found dispatching network-vif-plugged-b00791ed-450f-419a-9745-945fdb5a3713 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1183.486837] env[68217]: WARNING nova.compute.manager [req-19ff3059-9783-46cf-84e1-6751eebd6c87 req-83d52477-2ff0-4b6f-a53d-4d654330d42e service nova] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Received unexpected event network-vif-plugged-b00791ed-450f-419a-9745-945fdb5a3713 for instance with vm_state building and task_state spawning. [ 1183.570877] env[68217]: DEBUG nova.network.neutron [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Successfully updated port: b00791ed-450f-419a-9745-945fdb5a3713 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1183.786999] env[68217]: DEBUG oslo_concurrency.lockutils [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "400058d8-f9ca-41b9-a671-b04b0511d074" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.787875] env[68217]: DEBUG oslo_concurrency.lockutils [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "400058d8-f9ca-41b9-a671-b04b0511d074" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.787875] env[68217]: DEBUG oslo_concurrency.lockutils [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "400058d8-f9ca-41b9-a671-b04b0511d074-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1183.787875] env[68217]: DEBUG oslo_concurrency.lockutils [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "400058d8-f9ca-41b9-a671-b04b0511d074-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1183.787875] env[68217]: DEBUG oslo_concurrency.lockutils [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "400058d8-f9ca-41b9-a671-b04b0511d074-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1183.790075] env[68217]: INFO 
nova.compute.manager [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Terminating instance [ 1183.933923] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16227ee0-7b2b-4083-888b-4a39c9f26cbb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.942090] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd80405c-c53e-4f3c-bdb1-dbc7b60fdfd4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.974317] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f7c98ae-3d4c-4821-b641-b7dbb9b0df11 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.982206] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501bd68c-faee-4663-87a5-f7c412b5a557 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1183.995824] env[68217]: DEBUG nova.compute.provider_tree [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1184.073722] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "refresh_cache-01c32252-f6e0-4cb0-966e-622872d49199" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1184.073882] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "refresh_cache-01c32252-f6e0-4cb0-966e-622872d49199" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1184.074054] env[68217]: DEBUG nova.network.neutron [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1184.293995] env[68217]: DEBUG nova.compute.manager [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1184.294300] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1184.294577] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4ff34516-4be4-4731-bf9e-2ba660c3844b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.302395] env[68217]: DEBUG oslo_vmware.api [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1184.302395] env[68217]: value = "task-2962147" [ 1184.302395] env[68217]: _type = "Task" [ 1184.302395] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.310689] env[68217]: DEBUG oslo_vmware.api [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962147, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1184.501140] env[68217]: DEBUG nova.scheduler.client.report [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1184.620337] env[68217]: DEBUG nova.network.neutron [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1184.814726] env[68217]: DEBUG oslo_vmware.api [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962147, 'name': PowerOffVM_Task, 'duration_secs': 0.190142} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1184.815339] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1184.820788] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Volume detach. Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1184.820788] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594405', 'volume_id': 'a82505dd-3944-4311-bbf1-39afd9d16e72', 'name': 'volume-a82505dd-3944-4311-bbf1-39afd9d16e72', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '400058d8-f9ca-41b9-a671-b04b0511d074', 'attached_at': '2025-03-12T08:25:16.000000', 'detached_at': '', 'volume_id': 'a82505dd-3944-4311-bbf1-39afd9d16e72', 'serial': 'a82505dd-3944-4311-bbf1-39afd9d16e72'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1184.820788] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec042a7-9103-4cb1-afaa-07c3d4ace6b5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.824700] env[68217]: DEBUG nova.network.neutron [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Updating instance_info_cache with network_info: [{"id": "b00791ed-450f-419a-9745-945fdb5a3713", "address": "fa:16:3e:ee:95:41", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb00791ed-45", "ovs_interfaceid": "b00791ed-450f-419a-9745-945fdb5a3713", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1184.846564] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a63f8177-c58f-433a-8c2e-437218b5a165 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.856943] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a6d120a-0e75-4097-923e-b57038832df3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.881112] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dc1b0bb-2706-4623-9f14-ee716d14be1f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.896539] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] The volume has not been displaced from its original location: [datastore2] volume-a82505dd-3944-4311-bbf1-39afd9d16e72/volume-a82505dd-3944-4311-bbf1-39afd9d16e72.vmdk. No consolidation needed. {{(pid=68217) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1184.902225] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Reconfiguring VM instance instance-0000006f to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1184.903024] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-750f7325-5faa-4971-840b-cb7243c830f7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1184.923883] env[68217]: DEBUG oslo_vmware.api [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1184.923883] env[68217]: value = "task-2962148" [ 1184.923883] env[68217]: _type = "Task" [ 1184.923883] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1184.934079] env[68217]: DEBUG oslo_vmware.api [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962148, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.010017] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.215s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1185.010670] env[68217]: DEBUG nova.compute.manager [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1185.347425] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "refresh_cache-01c32252-f6e0-4cb0-966e-622872d49199" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1185.347977] env[68217]: DEBUG nova.compute.manager [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Instance network_info: |[{"id": "b00791ed-450f-419a-9745-945fdb5a3713", "address": "fa:16:3e:ee:95:41", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb00791ed-45", "ovs_interfaceid": "b00791ed-450f-419a-9745-945fdb5a3713", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1185.348426] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ee:95:41', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b00791ed-450f-419a-9745-945fdb5a3713', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1185.356617] env[68217]: DEBUG 
oslo.service.backend.eventlet.loopingcall [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1185.356810] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1185.357168] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9eb92812-94e6-416e-adf9-f6d62414d000 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.377288] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1185.377288] env[68217]: value = "task-2962149" [ 1185.377288] env[68217]: _type = "Task" [ 1185.377288] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.387017] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962149, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.434537] env[68217]: DEBUG oslo_vmware.api [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962148, 'name': ReconfigVM_Task, 'duration_secs': 0.21477} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.434823] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Reconfigured VM instance instance-0000006f to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1185.439949] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-07501fc3-2440-464e-828b-41fffb1e6af8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.455721] env[68217]: DEBUG oslo_vmware.api [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1185.455721] env[68217]: value = "task-2962150" [ 1185.455721] env[68217]: _type = "Task" [ 1185.455721] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.464704] env[68217]: DEBUG oslo_vmware.api [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962150, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.515540] env[68217]: DEBUG nova.compute.utils [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1185.517126] env[68217]: DEBUG nova.compute.manager [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1185.517396] env[68217]: DEBUG nova.network.neutron [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1185.532149] env[68217]: DEBUG nova.compute.manager [req-6d474114-ba05-4a96-8756-8310757b1a63 req-3499fa6f-0982-4ca1-8ac9-3ac9ce945d0e service nova] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Received event network-changed-b00791ed-450f-419a-9745-945fdb5a3713 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1185.532420] env[68217]: DEBUG nova.compute.manager [req-6d474114-ba05-4a96-8756-8310757b1a63 req-3499fa6f-0982-4ca1-8ac9-3ac9ce945d0e service nova] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Refreshing instance network info cache due to event network-changed-b00791ed-450f-419a-9745-945fdb5a3713. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1185.533039] env[68217]: DEBUG oslo_concurrency.lockutils [req-6d474114-ba05-4a96-8756-8310757b1a63 req-3499fa6f-0982-4ca1-8ac9-3ac9ce945d0e service nova] Acquiring lock "refresh_cache-01c32252-f6e0-4cb0-966e-622872d49199" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.533039] env[68217]: DEBUG oslo_concurrency.lockutils [req-6d474114-ba05-4a96-8756-8310757b1a63 req-3499fa6f-0982-4ca1-8ac9-3ac9ce945d0e service nova] Acquired lock "refresh_cache-01c32252-f6e0-4cb0-966e-622872d49199" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.533039] env[68217]: DEBUG nova.network.neutron [req-6d474114-ba05-4a96-8756-8310757b1a63 req-3499fa6f-0982-4ca1-8ac9-3ac9ce945d0e service nova] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Refreshing network info cache for port b00791ed-450f-419a-9745-945fdb5a3713 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1185.566119] env[68217]: DEBUG nova.policy [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd229078579a54e6991e85bc49326c0b6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b3068bf39ee943f1bdf378f8b2a5c360', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1185.889599] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962149, 'name': CreateVM_Task, 'duration_secs': 0.328852} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.889599] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1185.898462] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1185.898462] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1185.898462] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1185.898670] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0a5148b0-7583-4478-ae44-7bcb84f91c7a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.903756] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1185.903756] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5239e32f-b971-e16a-8213-323f4c84aeea" [ 1185.903756] env[68217]: _type = "Task" [ 1185.903756] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1185.912520] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5239e32f-b971-e16a-8213-323f4c84aeea, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1185.931759] env[68217]: DEBUG oslo_concurrency.lockutils [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Acquiring lock "22c8918b-c67e-467c-8aea-7dff71a8d266" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1185.932018] env[68217]: DEBUG oslo_concurrency.lockutils [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Lock "22c8918b-c67e-467c-8aea-7dff71a8d266" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1185.959999] env[68217]: DEBUG nova.network.neutron [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Successfully created port: ab455e1f-1232-4fd4-a71b-b73ce15172ff {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1185.967473] env[68217]: DEBUG oslo_vmware.api [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962150, 'name': ReconfigVM_Task, 'duration_secs': 0.137989} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1185.967643] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594405', 'volume_id': 'a82505dd-3944-4311-bbf1-39afd9d16e72', 'name': 'volume-a82505dd-3944-4311-bbf1-39afd9d16e72', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': '400058d8-f9ca-41b9-a671-b04b0511d074', 'attached_at': '2025-03-12T08:25:16.000000', 'detached_at': '', 'volume_id': 'a82505dd-3944-4311-bbf1-39afd9d16e72', 'serial': 'a82505dd-3944-4311-bbf1-39afd9d16e72'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1185.967871] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1185.970494] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f10038a-ae73-454f-a69d-f5691840c991 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1185.975068] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 
tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1185.975315] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a0b7a8a4-141a-4370-91ce-63fbb7aecc7b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.021134] env[68217]: DEBUG nova.compute.manager [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1186.043053] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1186.043270] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1186.043442] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Deleting the datastore file [datastore1] 400058d8-f9ca-41b9-a671-b04b0511d074 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1186.043767] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7c260540-45e3-4d27-9227-decd8791eebd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.050618] env[68217]: DEBUG oslo_vmware.api [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1186.050618] env[68217]: value = "task-2962152" [ 1186.050618] env[68217]: _type = "Task" [ 1186.050618] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.060023] env[68217]: DEBUG oslo_vmware.api [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962152, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.419554] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5239e32f-b971-e16a-8213-323f4c84aeea, 'name': SearchDatastore_Task, 'duration_secs': 0.010303} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.419554] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.419554] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1186.419554] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1186.419554] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1186.419554] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1186.419554] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-22d7082f-791a-4e5c-b17c-3e52af0a2aa9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.423765] env[68217]: DEBUG nova.network.neutron [req-6d474114-ba05-4a96-8756-8310757b1a63 req-3499fa6f-0982-4ca1-8ac9-3ac9ce945d0e service nova] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Updated VIF entry in instance network info cache for port b00791ed-450f-419a-9745-945fdb5a3713. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1186.424191] env[68217]: DEBUG nova.network.neutron [req-6d474114-ba05-4a96-8756-8310757b1a63 req-3499fa6f-0982-4ca1-8ac9-3ac9ce945d0e service nova] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Updating instance_info_cache with network_info: [{"id": "b00791ed-450f-419a-9745-945fdb5a3713", "address": "fa:16:3e:ee:95:41", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb00791ed-45", "ovs_interfaceid": "b00791ed-450f-419a-9745-945fdb5a3713", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1186.431319] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1186.431509] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1186.432254] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-60ea282a-ac1b-4b69-892e-82ae2187b07e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.435447] env[68217]: DEBUG nova.compute.manager [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1186.443317] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1186.443317] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]528fa994-d03c-ada4-8db2-bac364e67f67" [ 1186.443317] env[68217]: _type = "Task" [ 1186.443317] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.452183] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528fa994-d03c-ada4-8db2-bac364e67f67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1186.559874] env[68217]: DEBUG oslo_vmware.api [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962152, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.188938} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.560109] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1186.560287] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1186.560457] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1186.560628] env[68217]: INFO nova.compute.manager [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Took 2.27 seconds to destroy the instance on the hypervisor. [ 1186.560869] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1186.561737] env[68217]: DEBUG nova.compute.manager [-] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1186.561841] env[68217]: DEBUG nova.network.neutron [-] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1186.875760] env[68217]: DEBUG oslo_concurrency.lockutils [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Acquiring lock "c9601da9-f07c-4cea-9a40-0b1bca35a17a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.875994] env[68217]: DEBUG oslo_concurrency.lockutils [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Lock "c9601da9-f07c-4cea-9a40-0b1bca35a17a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.927749] env[68217]: DEBUG oslo_concurrency.lockutils [req-6d474114-ba05-4a96-8756-8310757b1a63 req-3499fa6f-0982-4ca1-8ac9-3ac9ce945d0e service nova] Releasing lock "refresh_cache-01c32252-f6e0-4cb0-966e-622872d49199" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.956235] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]528fa994-d03c-ada4-8db2-bac364e67f67, 'name': SearchDatastore_Task, 'duration_secs': 0.020184} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.957153] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa332778-ad1a-4d18-8150-3992e673f584 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.963188] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1186.963188] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52eba82a-c60f-44b6-cec0-558dd2ccf519" [ 1186.963188] env[68217]: _type = "Task" [ 1186.963188] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.967127] env[68217]: DEBUG oslo_concurrency.lockutils [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1186.967378] env[68217]: DEBUG oslo_concurrency.lockutils [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1186.968949] env[68217]: INFO nova.compute.claims [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1186.976622] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52eba82a-c60f-44b6-cec0-558dd2ccf519, 'name': SearchDatastore_Task, 'duration_secs': 0.009706} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1186.976884] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1186.977155] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 01c32252-f6e0-4cb0-966e-622872d49199/01c32252-f6e0-4cb0-966e-622872d49199.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1186.977406] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ad5c769f-7756-4695-8f40-1a3db706a834 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1186.984237] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1186.984237] env[68217]: value = "task-2962153" [ 1186.984237] env[68217]: _type = "Task" [ 1186.984237] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1186.992539] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962153, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.032538] env[68217]: DEBUG nova.compute.manager [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1187.062023] env[68217]: DEBUG nova.virt.hardware [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1187.062207] env[68217]: DEBUG nova.virt.hardware [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1187.062358] env[68217]: DEBUG nova.virt.hardware [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1187.062540] env[68217]: DEBUG nova.virt.hardware [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1187.062688] env[68217]: DEBUG nova.virt.hardware [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1187.062837] env[68217]: DEBUG nova.virt.hardware [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 
1187.063053] env[68217]: DEBUG nova.virt.hardware [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1187.063218] env[68217]: DEBUG nova.virt.hardware [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1187.063394] env[68217]: DEBUG nova.virt.hardware [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1187.063551] env[68217]: DEBUG nova.virt.hardware [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1187.063724] env[68217]: DEBUG nova.virt.hardware [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1187.064795] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4815c7eb-cd56-4891-aa6a-10a823f73ad9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.072557] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6215a92-1eb4-46ab-a703-0a7428a05d52 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1187.378861] env[68217]: DEBUG nova.compute.manager [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1187.495407] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962153, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1187.543467] env[68217]: DEBUG nova.network.neutron [-] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1187.571590] env[68217]: DEBUG nova.compute.manager [req-ee807eaf-0a71-4cdd-8c3c-bf401719636f req-042cc4a4-2a16-4819-bb4f-5e72a5c1e876 service nova] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Received event network-vif-deleted-189406da-f39b-4370-b43d-945cbb45afb2 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1187.904588] env[68217]: DEBUG oslo_concurrency.lockutils [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1187.993928] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962153, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558254} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1187.994695] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 01c32252-f6e0-4cb0-966e-622872d49199/01c32252-f6e0-4cb0-966e-622872d49199.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1187.995164] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1187.995579] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-19651e6c-a8a3-4110-b32a-f417d0e801c7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.007018] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1188.007018] env[68217]: value = "task-2962154" [ 1188.007018] env[68217]: _type = "Task" [ 1188.007018] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.012775] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962154, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.046703] env[68217]: INFO nova.compute.manager [-] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Took 1.48 seconds to deallocate network for instance. [ 1188.048461] env[68217]: DEBUG nova.network.neutron [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Successfully updated port: ab455e1f-1232-4fd4-a71b-b73ce15172ff {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1188.117805] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d69098d-4dbf-4216-ad1f-0eca2dc9bd23 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.124895] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d9df8e6-7657-4c12-8ed6-5d7a6868e6fe {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.154212] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db2afe63-c07e-47a4-a14b-f0d1b87362aa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.161470] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2b53e62-d9f0-4038-b751-579af50a7ea6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.174265] env[68217]: DEBUG nova.compute.provider_tree [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1188.515253] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962154, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069891} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1188.515530] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1188.516365] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23ce181a-d35e-4c53-8d15-4e68632962e7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.539313] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Reconfiguring VM instance instance-00000076 to attach disk [datastore2] 01c32252-f6e0-4cb0-966e-622872d49199/01c32252-f6e0-4cb0-966e-622872d49199.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1188.539536] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f9c98593-122a-42d5-8ceb-331724199e72 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1188.553685] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "refresh_cache-5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1188.553829] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquired lock "refresh_cache-5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1188.554150] env[68217]: DEBUG nova.network.neutron [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1188.560190] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1188.560190] env[68217]: value = "task-2962155" [ 1188.560190] env[68217]: _type = "Task" [ 1188.560190] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1188.568692] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962155, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1188.595813] env[68217]: INFO nova.compute.manager [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Took 0.55 seconds to detach 1 volumes for instance. [ 1188.677035] env[68217]: DEBUG nova.scheduler.client.report [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1189.069639] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962155, 'name': ReconfigVM_Task, 'duration_secs': 0.29607} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.069921] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Reconfigured VM instance instance-00000076 to attach disk [datastore2] 01c32252-f6e0-4cb0-966e-622872d49199/01c32252-f6e0-4cb0-966e-622872d49199.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1189.070634] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-292c7dfb-0e24-48a2-9c5c-712acd5fc31e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.077492] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1189.077492] env[68217]: value = "task-2962156" [ 1189.077492] env[68217]: _type = "Task" [ 1189.077492] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.085187] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962156, 'name': Rename_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.105381] env[68217]: DEBUG oslo_concurrency.lockutils [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.108480] env[68217]: DEBUG nova.network.neutron [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1189.181817] env[68217]: DEBUG oslo_concurrency.lockutils [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.214s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1189.182363] env[68217]: DEBUG nova.compute.manager [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1189.185050] env[68217]: DEBUG oslo_concurrency.lockutils [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.281s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.186453] env[68217]: INFO nova.compute.claims [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1189.259731] env[68217]: DEBUG nova.network.neutron [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Updating instance_info_cache with network_info: [{"id": "ab455e1f-1232-4fd4-a71b-b73ce15172ff", "address": "fa:16:3e:06:41:09", "network": {"id": "e7f56c12-ca87-40ad-b72d-955989c48237", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-417650994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3068bf39ee943f1bdf378f8b2a5c360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab455e1f-12", "ovs_interfaceid": "ab455e1f-1232-4fd4-a71b-b73ce15172ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1189.587519] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962156, 'name': Rename_Task, 'duration_secs': 0.135968} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1189.587912] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1189.588125] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ef870ea-dda9-4a7f-8a5e-601e552bcade {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.594237] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1189.594237] env[68217]: value = "task-2962157" [ 1189.594237] env[68217]: _type = "Task" [ 1189.594237] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.596480] env[68217]: DEBUG nova.compute.manager [req-61ea9487-fdb3-4f85-9083-1dfa2a931b32 req-7269a1fd-c9b8-407d-b6e1-7525aeace6cf service nova] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Received event network-vif-plugged-ab455e1f-1232-4fd4-a71b-b73ce15172ff {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1189.596683] env[68217]: DEBUG oslo_concurrency.lockutils [req-61ea9487-fdb3-4f85-9083-1dfa2a931b32 req-7269a1fd-c9b8-407d-b6e1-7525aeace6cf service nova] Acquiring lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1189.596890] env[68217]: DEBUG oslo_concurrency.lockutils [req-61ea9487-fdb3-4f85-9083-1dfa2a931b32 req-7269a1fd-c9b8-407d-b6e1-7525aeace6cf service nova] Lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1189.597070] env[68217]: DEBUG oslo_concurrency.lockutils [req-61ea9487-fdb3-4f85-9083-1dfa2a931b32 req-7269a1fd-c9b8-407d-b6e1-7525aeace6cf service nova] Lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1189.597241] env[68217]: DEBUG nova.compute.manager [req-61ea9487-fdb3-4f85-9083-1dfa2a931b32 req-7269a1fd-c9b8-407d-b6e1-7525aeace6cf service nova] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] No waiting events found dispatching network-vif-plugged-ab455e1f-1232-4fd4-a71b-b73ce15172ff {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1189.597404] env[68217]: WARNING nova.compute.manager [req-61ea9487-fdb3-4f85-9083-1dfa2a931b32 req-7269a1fd-c9b8-407d-b6e1-7525aeace6cf service nova] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Received unexpected event network-vif-plugged-ab455e1f-1232-4fd4-a71b-b73ce15172ff for instance with vm_state building and task_state spawning. [ 1189.597622] env[68217]: DEBUG nova.compute.manager [req-61ea9487-fdb3-4f85-9083-1dfa2a931b32 req-7269a1fd-c9b8-407d-b6e1-7525aeace6cf service nova] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Received event network-changed-ab455e1f-1232-4fd4-a71b-b73ce15172ff {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1189.597733] env[68217]: DEBUG nova.compute.manager [req-61ea9487-fdb3-4f85-9083-1dfa2a931b32 req-7269a1fd-c9b8-407d-b6e1-7525aeace6cf service nova] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Refreshing instance network info cache due to event network-changed-ab455e1f-1232-4fd4-a71b-b73ce15172ff. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1189.597903] env[68217]: DEBUG oslo_concurrency.lockutils [req-61ea9487-fdb3-4f85-9083-1dfa2a931b32 req-7269a1fd-c9b8-407d-b6e1-7525aeace6cf service nova] Acquiring lock "refresh_cache-5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1189.606462] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962157, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1189.690755] env[68217]: DEBUG nova.compute.utils [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1189.694136] env[68217]: DEBUG nova.compute.manager [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1189.694801] env[68217]: DEBUG nova.network.neutron [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1189.757106] env[68217]: DEBUG nova.policy [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4237c028c4744612a7e0ee59ff26b914', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b669701704154ef9b91f04d4d36c0564', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1189.762335] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Releasing lock "refresh_cache-5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1189.762689] env[68217]: DEBUG nova.compute.manager [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Instance network_info: |[{"id": "ab455e1f-1232-4fd4-a71b-b73ce15172ff", "address": "fa:16:3e:06:41:09", "network": {"id": "e7f56c12-ca87-40ad-b72d-955989c48237", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-417650994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": 
"gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3068bf39ee943f1bdf378f8b2a5c360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab455e1f-12", "ovs_interfaceid": "ab455e1f-1232-4fd4-a71b-b73ce15172ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1189.762998] env[68217]: DEBUG oslo_concurrency.lockutils [req-61ea9487-fdb3-4f85-9083-1dfa2a931b32 req-7269a1fd-c9b8-407d-b6e1-7525aeace6cf service nova] Acquired lock "refresh_cache-5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1189.763200] env[68217]: DEBUG nova.network.neutron [req-61ea9487-fdb3-4f85-9083-1dfa2a931b32 req-7269a1fd-c9b8-407d-b6e1-7525aeace6cf service nova] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Refreshing network info cache for port ab455e1f-1232-4fd4-a71b-b73ce15172ff {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1189.764459] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:41:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '69e1a5c1-4ae7-409b-8de7-d401684ef60d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ab455e1f-1232-4fd4-a71b-b73ce15172ff', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1189.772010] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1189.774844] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1189.775344] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ab972fd0-a4e2-4b12-8c99-0e3716744d21 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1189.797988] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1189.797988] env[68217]: value = "task-2962158" [ 1189.797988] env[68217]: _type = "Task" [ 1189.797988] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1189.807035] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962158, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.110159] env[68217]: DEBUG oslo_vmware.api [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962157, 'name': PowerOnVM_Task, 'duration_secs': 0.43828} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.111015] env[68217]: DEBUG nova.network.neutron [req-61ea9487-fdb3-4f85-9083-1dfa2a931b32 req-7269a1fd-c9b8-407d-b6e1-7525aeace6cf service nova] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Updated VIF entry in instance network info cache for port ab455e1f-1232-4fd4-a71b-b73ce15172ff. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1190.111360] env[68217]: DEBUG nova.network.neutron [req-61ea9487-fdb3-4f85-9083-1dfa2a931b32 req-7269a1fd-c9b8-407d-b6e1-7525aeace6cf service nova] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Updating instance_info_cache with network_info: [{"id": "ab455e1f-1232-4fd4-a71b-b73ce15172ff", "address": "fa:16:3e:06:41:09", "network": {"id": "e7f56c12-ca87-40ad-b72d-955989c48237", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-417650994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3068bf39ee943f1bdf378f8b2a5c360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab455e1f-12", "ovs_interfaceid": "ab455e1f-1232-4fd4-a71b-b73ce15172ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1190.112599] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1190.112703] env[68217]: INFO nova.compute.manager [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Took 7.03 seconds to spawn the instance on the hypervisor. 
[ 1190.112884] env[68217]: DEBUG nova.compute.manager [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1190.113845] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76ae4559-db2d-4d3c-8410-f72f69520aa1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.187560] env[68217]: DEBUG nova.network.neutron [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Successfully created port: 306424b1-5a55-49af-a82a-c5cc49304899 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1190.195488] env[68217]: DEBUG nova.compute.manager [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1190.309475] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962158, 'name': CreateVM_Task, 'duration_secs': 0.297004} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.309659] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1190.310375] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.310540] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1190.310845] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1190.311115] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-56c12ee6-8c8e-4893-9b75-5bf47faf8269 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.315405] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 
tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1190.315405] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52cf9268-083f-637d-27e2-45e4714663a9" [ 1190.315405] env[68217]: _type = "Task" [ 1190.315405] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.325353] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52cf9268-083f-637d-27e2-45e4714663a9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.339634] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a5111cb-0687-4ec1-8b78-5b28c54b2614 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.346064] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fede7fdb-9026-4c33-94b6-6dfcd4a59380 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.375963] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49013c3d-5e03-4b55-961c-a6b1932822f1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.383395] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09255811-2534-401c-8f46-da4ba263780c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.397812] env[68217]: DEBUG nova.compute.provider_tree [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1190.614215] env[68217]: DEBUG oslo_concurrency.lockutils [req-61ea9487-fdb3-4f85-9083-1dfa2a931b32 req-7269a1fd-c9b8-407d-b6e1-7525aeace6cf service nova] Releasing lock "refresh_cache-5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1190.629697] env[68217]: INFO nova.compute.manager [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Took 16.48 seconds to build instance. [ 1190.827960] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52cf9268-083f-637d-27e2-45e4714663a9, 'name': SearchDatastore_Task, 'duration_secs': 0.011178} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1190.828359] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1190.828600] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1190.828837] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1190.828986] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1190.829181] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1190.829440] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cc29f72f-293b-4e35-b0ca-ec362eb5a011 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.838800] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1190.838973] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1190.839690] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-79448e3b-96d5-42be-b53b-1773af7522a9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1190.845536] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1190.845536] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a775bd-859f-1213-fefd-0b350b3fec41" [ 1190.845536] env[68217]: _type = "Task" [ 1190.845536] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1190.854124] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a775bd-859f-1213-fefd-0b350b3fec41, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1190.900751] env[68217]: DEBUG nova.scheduler.client.report [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1191.131848] env[68217]: DEBUG oslo_concurrency.lockutils [None req-8d326228-4660-44a1-96d7-a042276f7ab7 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "01c32252-f6e0-4cb0-966e-622872d49199" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 17.986s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.207856] env[68217]: DEBUG nova.compute.manager [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1191.233584] env[68217]: DEBUG nova.virt.hardware [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1191.234229] env[68217]: DEBUG nova.virt.hardware [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1191.234229] env[68217]: DEBUG nova.virt.hardware [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1191.234229] env[68217]: DEBUG nova.virt.hardware [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1191.234375] env[68217]: DEBUG nova.virt.hardware [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1191.234602] env[68217]: DEBUG nova.virt.hardware [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1191.234686] env[68217]: DEBUG nova.virt.hardware [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1191.234817] env[68217]: DEBUG nova.virt.hardware [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1191.234982] env[68217]: DEBUG 
nova.virt.hardware [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1191.235231] env[68217]: DEBUG nova.virt.hardware [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1191.235320] env[68217]: DEBUG nova.virt.hardware [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1191.236212] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55fee65f-a4ce-4643-aa49-f5eea4293886 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.244708] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd951e03-6a8d-4b6a-b6d3-80dfe48a7290 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.356817] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a775bd-859f-1213-fefd-0b350b3fec41, 'name': SearchDatastore_Task, 'duration_secs': 0.008624} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.357645] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-158574a8-999d-4068-b26f-2f3b40de8456 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.362934] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1191.362934] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b48544-bddf-1cbc-81fc-e4cad48801e7" [ 1191.362934] env[68217]: _type = "Task" [ 1191.362934] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.370743] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b48544-bddf-1cbc-81fc-e4cad48801e7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.405722] env[68217]: DEBUG oslo_concurrency.lockutils [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.221s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.406257] env[68217]: DEBUG nova.compute.manager [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1191.408849] env[68217]: DEBUG oslo_concurrency.lockutils [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.303s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1191.408991] env[68217]: DEBUG oslo_concurrency.lockutils [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1191.429759] env[68217]: INFO nova.scheduler.client.report [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Deleted allocations for instance 400058d8-f9ca-41b9-a671-b04b0511d074 [ 1191.629781] env[68217]: DEBUG nova.compute.manager [req-6f3f5ebf-c19f-472f-887b-3285b90f994b req-a9468bae-3ca9-4334-a299-801ae0bb4d69 service nova] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Received event network-changed-b00791ed-450f-419a-9745-945fdb5a3713 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1191.629988] env[68217]: DEBUG nova.compute.manager [req-6f3f5ebf-c19f-472f-887b-3285b90f994b req-a9468bae-3ca9-4334-a299-801ae0bb4d69 service nova] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Refreshing instance network info cache due to event network-changed-b00791ed-450f-419a-9745-945fdb5a3713. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1191.630212] env[68217]: DEBUG oslo_concurrency.lockutils [req-6f3f5ebf-c19f-472f-887b-3285b90f994b req-a9468bae-3ca9-4334-a299-801ae0bb4d69 service nova] Acquiring lock "refresh_cache-01c32252-f6e0-4cb0-966e-622872d49199" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1191.630366] env[68217]: DEBUG oslo_concurrency.lockutils [req-6f3f5ebf-c19f-472f-887b-3285b90f994b req-a9468bae-3ca9-4334-a299-801ae0bb4d69 service nova] Acquired lock "refresh_cache-01c32252-f6e0-4cb0-966e-622872d49199" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1191.630528] env[68217]: DEBUG nova.network.neutron [req-6f3f5ebf-c19f-472f-887b-3285b90f994b req-a9468bae-3ca9-4334-a299-801ae0bb4d69 service nova] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Refreshing network info cache for port b00791ed-450f-419a-9745-945fdb5a3713 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1191.869314] env[68217]: DEBUG nova.network.neutron [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Successfully updated port: 306424b1-5a55-49af-a82a-c5cc49304899 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1191.876721] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b48544-bddf-1cbc-81fc-e4cad48801e7, 'name': SearchDatastore_Task, 'duration_secs': 0.009985} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1191.877594] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1191.878035] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c/5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1191.878289] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1679edd7-6d08-437f-8335-60352cb8a09f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1191.885412] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1191.885412] env[68217]: value = "task-2962160" [ 1191.885412] env[68217]: _type = "Task" [ 1191.885412] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1191.893938] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962160, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1191.912719] env[68217]: DEBUG nova.compute.utils [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1191.914180] env[68217]: DEBUG nova.compute.manager [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Not allocating networking since 'none' was specified. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1191.937652] env[68217]: DEBUG oslo_concurrency.lockutils [None req-129a5744-6c2b-46f2-98d6-98ded7e5458c tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "400058d8-f9ca-41b9-a671-b04b0511d074" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.150s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1192.350636] env[68217]: DEBUG nova.network.neutron [req-6f3f5ebf-c19f-472f-887b-3285b90f994b req-a9468bae-3ca9-4334-a299-801ae0bb4d69 service nova] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Updated VIF entry in instance network info cache for port b00791ed-450f-419a-9745-945fdb5a3713. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1192.351071] env[68217]: DEBUG nova.network.neutron [req-6f3f5ebf-c19f-472f-887b-3285b90f994b req-a9468bae-3ca9-4334-a299-801ae0bb4d69 service nova] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Updating instance_info_cache with network_info: [{"id": "b00791ed-450f-419a-9745-945fdb5a3713", "address": "fa:16:3e:ee:95:41", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb00791ed-45", "ovs_interfaceid": "b00791ed-450f-419a-9745-945fdb5a3713", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1192.372392] env[68217]: DEBUG oslo_concurrency.lockutils [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Acquiring lock "refresh_cache-22c8918b-c67e-467c-8aea-7dff71a8d266" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1192.372559] env[68217]: DEBUG oslo_concurrency.lockutils [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Acquired lock "refresh_cache-22c8918b-c67e-467c-8aea-7dff71a8d266" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1192.372758] env[68217]: DEBUG nova.network.neutron [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Building network info cache for instance {{(pid=68217) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2067}} [ 1192.395597] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962160, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.483581} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.395881] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c/5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1192.396075] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1192.396313] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-3676a0a0-57d9-4775-a899-aa1fcc5f69f9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.402872] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1192.402872] env[68217]: value = "task-2962161" [ 1192.402872] env[68217]: _type = "Task" [ 1192.402872] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.410625] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962161, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1192.415245] env[68217]: DEBUG nova.compute.manager [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1192.853929] env[68217]: DEBUG oslo_concurrency.lockutils [req-6f3f5ebf-c19f-472f-887b-3285b90f994b req-a9468bae-3ca9-4334-a299-801ae0bb4d69 service nova] Releasing lock "refresh_cache-01c32252-f6e0-4cb0-966e-622872d49199" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1192.913392] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962161, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072324} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1192.914175] env[68217]: DEBUG nova.network.neutron [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1192.916019] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1192.916810] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a21e08-2597-49dd-a61f-d234b7314119 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.942517] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Reconfiguring VM instance instance-00000077 to attach disk [datastore2] 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c/5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1192.943102] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-925327cd-842c-449d-a6ab-a7b6cfe95b86 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1192.966257] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1192.966257] env[68217]: value = "task-2962162" [ 1192.966257] env[68217]: _type = "Task" [ 1192.966257] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1192.974416] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962162, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.133195] env[68217]: DEBUG nova.network.neutron [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Updating instance_info_cache with network_info: [{"id": "306424b1-5a55-49af-a82a-c5cc49304899", "address": "fa:16:3e:1e:7b:4b", "network": {"id": "8b51e7d9-8ebe-4854-9b02-3b7f7afff71c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-391603824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b669701704154ef9b91f04d4d36c0564", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap306424b1-5a", "ovs_interfaceid": "306424b1-5a55-49af-a82a-c5cc49304899", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1193.278877] env[68217]: DEBUG oslo_concurrency.lockutils [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.279213] env[68217]: DEBUG oslo_concurrency.lockutils [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.445061] env[68217]: DEBUG nova.compute.manager [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1193.472413] env[68217]: DEBUG nova.virt.hardware [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1193.472658] env[68217]: DEBUG nova.virt.hardware [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1193.472813] env[68217]: DEBUG nova.virt.hardware [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1193.472993] env[68217]: DEBUG nova.virt.hardware [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1193.473157] env[68217]: DEBUG nova.virt.hardware [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1193.473302] env[68217]: DEBUG nova.virt.hardware [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1193.473506] env[68217]: DEBUG nova.virt.hardware [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1193.473661] env[68217]: DEBUG nova.virt.hardware [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1193.473825] env[68217]: DEBUG nova.virt.hardware [None 
req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1193.473988] env[68217]: DEBUG nova.virt.hardware [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1193.474178] env[68217]: DEBUG nova.virt.hardware [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1193.474943] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3858c58c-92d3-403c-8bd4-8ae932ade4ff {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.483170] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962162, 'name': ReconfigVM_Task, 'duration_secs': 0.274196} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1193.483418] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Reconfigured VM instance instance-00000077 to attach disk [datastore2] 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c/5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1193.485452] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7abb9397-3d0c-4ed8-9bc4-dcdce8559080 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.487841] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bd368d6-694e-4297-bb6c-a275bec10173 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.501090] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Instance VIF info [] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1193.506432] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Creating folder: Project (1a203d239e54467cba305247bf5d0c19). Parent ref: group-v594094. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1193.507527] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7fbeb950-4709-4be6-a48e-f782de653ba1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.509031] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1193.509031] env[68217]: value = "task-2962163" [ 1193.509031] env[68217]: _type = "Task" [ 1193.509031] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.516223] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962163, 'name': Rename_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.517287] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Created folder: Project (1a203d239e54467cba305247bf5d0c19) in parent group-v594094. [ 1193.517465] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Creating folder: Instances. Parent ref: group-v594413. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1193.517668] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d0740c15-86a7-49d4-ae2f-090baa255e39 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.526251] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Created folder: Instances in parent group-v594413. [ 1193.526472] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1193.526677] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1193.526887] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8c1937e6-7e00-4e0d-9e6b-b14715e2a2a4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.541947] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1193.541947] env[68217]: value = "task-2962166" [ 1193.541947] env[68217]: _type = "Task" [ 1193.541947] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.549073] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962166, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.620880] env[68217]: DEBUG oslo_concurrency.lockutils [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "815d1801-fa07-4466-850d-b1a36d630d46" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.621327] env[68217]: DEBUG oslo_concurrency.lockutils [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "815d1801-fa07-4466-850d-b1a36d630d46" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.621574] env[68217]: DEBUG oslo_concurrency.lockutils [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "815d1801-fa07-4466-850d-b1a36d630d46-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.621811] env[68217]: DEBUG oslo_concurrency.lockutils [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "815d1801-fa07-4466-850d-b1a36d630d46-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.622073] env[68217]: DEBUG oslo_concurrency.lockutils [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "815d1801-fa07-4466-850d-b1a36d630d46-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.624757] env[68217]: INFO nova.compute.manager [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Terminating instance [ 1193.635488] env[68217]: DEBUG oslo_concurrency.lockutils [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Releasing lock "refresh_cache-22c8918b-c67e-467c-8aea-7dff71a8d266" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1193.635865] env[68217]: DEBUG nova.compute.manager [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 
22c8918b-c67e-467c-8aea-7dff71a8d266] Instance network_info: |[{"id": "306424b1-5a55-49af-a82a-c5cc49304899", "address": "fa:16:3e:1e:7b:4b", "network": {"id": "8b51e7d9-8ebe-4854-9b02-3b7f7afff71c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-391603824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b669701704154ef9b91f04d4d36c0564", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap306424b1-5a", "ovs_interfaceid": "306424b1-5a55-49af-a82a-c5cc49304899", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1193.636269] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1e:7b:4b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f0ef5aba-bd9a-42ff-a1a0-5e763986d70a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '306424b1-5a55-49af-a82a-c5cc49304899', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1193.644350] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Creating folder: Project (b669701704154ef9b91f04d4d36c0564). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1193.645246] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b905ca5f-3d08-4af2-a153-47c7917a813a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.658327] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Created folder: Project (b669701704154ef9b91f04d4d36c0564) in parent group-v594094. [ 1193.658530] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Creating folder: Instances. Parent ref: group-v594416. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1193.658858] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b768884f-1278-43a1-97d5-7e9f43b6bff2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.671195] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Created folder: Instances in parent group-v594416. [ 1193.671195] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1193.671195] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1193.671195] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-671805a0-6b07-4eab-9baa-91d4f7135385 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1193.688567] env[68217]: DEBUG nova.compute.manager [req-f56cfaea-d90f-4280-8d49-141a8add7b37 req-55b8dcb1-4b46-4b52-adc3-508101ad6fe3 service nova] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Received event network-vif-plugged-306424b1-5a55-49af-a82a-c5cc49304899 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1193.688741] env[68217]: DEBUG oslo_concurrency.lockutils [req-f56cfaea-d90f-4280-8d49-141a8add7b37 req-55b8dcb1-4b46-4b52-adc3-508101ad6fe3 service nova] Acquiring lock "22c8918b-c67e-467c-8aea-7dff71a8d266-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1193.689022] env[68217]: DEBUG oslo_concurrency.lockutils [req-f56cfaea-d90f-4280-8d49-141a8add7b37 req-55b8dcb1-4b46-4b52-adc3-508101ad6fe3 service nova] Lock "22c8918b-c67e-467c-8aea-7dff71a8d266-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1193.689070] env[68217]: DEBUG oslo_concurrency.lockutils [req-f56cfaea-d90f-4280-8d49-141a8add7b37 req-55b8dcb1-4b46-4b52-adc3-508101ad6fe3 service nova] Lock "22c8918b-c67e-467c-8aea-7dff71a8d266-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1193.689227] env[68217]: DEBUG nova.compute.manager [req-f56cfaea-d90f-4280-8d49-141a8add7b37 req-55b8dcb1-4b46-4b52-adc3-508101ad6fe3 service nova] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] No waiting events found dispatching network-vif-plugged-306424b1-5a55-49af-a82a-c5cc49304899 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1193.689353] env[68217]: WARNING nova.compute.manager [req-f56cfaea-d90f-4280-8d49-141a8add7b37 
req-55b8dcb1-4b46-4b52-adc3-508101ad6fe3 service nova] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Received unexpected event network-vif-plugged-306424b1-5a55-49af-a82a-c5cc49304899 for instance with vm_state building and task_state spawning. [ 1193.689506] env[68217]: DEBUG nova.compute.manager [req-f56cfaea-d90f-4280-8d49-141a8add7b37 req-55b8dcb1-4b46-4b52-adc3-508101ad6fe3 service nova] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Received event network-changed-306424b1-5a55-49af-a82a-c5cc49304899 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1193.689656] env[68217]: DEBUG nova.compute.manager [req-f56cfaea-d90f-4280-8d49-141a8add7b37 req-55b8dcb1-4b46-4b52-adc3-508101ad6fe3 service nova] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Refreshing instance network info cache due to event network-changed-306424b1-5a55-49af-a82a-c5cc49304899. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1193.689836] env[68217]: DEBUG oslo_concurrency.lockutils [req-f56cfaea-d90f-4280-8d49-141a8add7b37 req-55b8dcb1-4b46-4b52-adc3-508101ad6fe3 service nova] Acquiring lock "refresh_cache-22c8918b-c67e-467c-8aea-7dff71a8d266" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1193.689964] env[68217]: DEBUG oslo_concurrency.lockutils [req-f56cfaea-d90f-4280-8d49-141a8add7b37 req-55b8dcb1-4b46-4b52-adc3-508101ad6fe3 service nova] Acquired lock "refresh_cache-22c8918b-c67e-467c-8aea-7dff71a8d266" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1193.690152] env[68217]: DEBUG nova.network.neutron [req-f56cfaea-d90f-4280-8d49-141a8add7b37 req-55b8dcb1-4b46-4b52-adc3-508101ad6fe3 service nova] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Refreshing network info cache for port 306424b1-5a55-49af-a82a-c5cc49304899 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1193.696443] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1193.696443] env[68217]: value = "task-2962169" [ 1193.696443] env[68217]: _type = "Task" [ 1193.696443] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1193.703865] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962169, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1193.781968] env[68217]: DEBUG nova.compute.manager [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1194.018787] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962163, 'name': Rename_Task, 'duration_secs': 0.167022} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.019145] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1194.019437] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dddcbf49-696f-4fdb-95ac-eda4013c9a68 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.026355] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1194.026355] env[68217]: value = "task-2962170" [ 1194.026355] env[68217]: _type = "Task" [ 1194.026355] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.033808] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962170, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.051743] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962166, 'name': CreateVM_Task, 'duration_secs': 0.286172} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.051917] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1194.052327] env[68217]: DEBUG oslo_concurrency.lockutils [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.052485] env[68217]: DEBUG oslo_concurrency.lockutils [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.052809] env[68217]: DEBUG oslo_concurrency.lockutils [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1194.053049] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ffd0e6b-767a-480f-ad9e-e642e81cec63 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.058386] 
env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1194.058386] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b20b79-c6db-123c-3e67-34880c717ed9" [ 1194.058386] env[68217]: _type = "Task" [ 1194.058386] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.065798] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b20b79-c6db-123c-3e67-34880c717ed9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.129422] env[68217]: DEBUG nova.compute.manager [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1194.129676] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1194.130585] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1e01e72-99ca-4c29-ada0-ae9e0eb55343 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.137592] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1194.137895] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-860f4555-40f6-4d07-ae41-a344d4768988 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.143588] env[68217]: DEBUG oslo_vmware.api [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1194.143588] env[68217]: value = "task-2962171" [ 1194.143588] env[68217]: _type = "Task" [ 1194.143588] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.151784] env[68217]: DEBUG oslo_vmware.api [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962171, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.214922] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962169, 'name': CreateVM_Task, 'duration_secs': 0.312429} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.215155] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1194.216281] env[68217]: DEBUG oslo_concurrency.lockutils [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.216589] env[68217]: DEBUG oslo_concurrency.lockutils [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.216971] env[68217]: DEBUG oslo_concurrency.lockutils [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1194.217337] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-85f39231-4482-4007-a90e-6fa824f0ca9e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.226738] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Waiting for the task: (returnval){ [ 1194.226738] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52cec95b-40cb-2020-a785-0f9c9e32e477" [ 1194.226738] env[68217]: _type = "Task" [ 1194.226738] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.242148] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52cec95b-40cb-2020-a785-0f9c9e32e477, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.313592] env[68217]: DEBUG oslo_concurrency.lockutils [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1194.314086] env[68217]: DEBUG oslo_concurrency.lockutils [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1194.316015] env[68217]: INFO nova.compute.claims [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1194.499199] env[68217]: DEBUG nova.network.neutron [req-f56cfaea-d90f-4280-8d49-141a8add7b37 req-55b8dcb1-4b46-4b52-adc3-508101ad6fe3 service nova] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Updated VIF entry in instance network info cache for port 306424b1-5a55-49af-a82a-c5cc49304899. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1194.499676] env[68217]: DEBUG nova.network.neutron [req-f56cfaea-d90f-4280-8d49-141a8add7b37 req-55b8dcb1-4b46-4b52-adc3-508101ad6fe3 service nova] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Updating instance_info_cache with network_info: [{"id": "306424b1-5a55-49af-a82a-c5cc49304899", "address": "fa:16:3e:1e:7b:4b", "network": {"id": "8b51e7d9-8ebe-4854-9b02-3b7f7afff71c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-391603824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b669701704154ef9b91f04d4d36c0564", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap306424b1-5a", "ovs_interfaceid": "306424b1-5a55-49af-a82a-c5cc49304899", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1194.538777] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962170, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.569207] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b20b79-c6db-123c-3e67-34880c717ed9, 'name': SearchDatastore_Task, 'duration_secs': 0.013502} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.569514] env[68217]: DEBUG oslo_concurrency.lockutils [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1194.569748] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1194.569980] env[68217]: DEBUG oslo_concurrency.lockutils [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.570177] env[68217]: DEBUG oslo_concurrency.lockutils [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.570407] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1194.570738] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-924e267c-9268-4a37-949a-d5ee0a9151e1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.582886] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1194.583107] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1194.583812] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a835edb-7b6d-4386-ada6-a1dec7938a92 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.590027] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1194.590027] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522228f9-0e7e-1d88-7e13-7687d991d129" [ 1194.590027] env[68217]: _type = "Task" [ 1194.590027] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.598438] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522228f9-0e7e-1d88-7e13-7687d991d129, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.654129] env[68217]: DEBUG oslo_vmware.api [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962171, 'name': PowerOffVM_Task, 'duration_secs': 0.21999} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.654438] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1194.654613] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1194.654898] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ebdfbc3f-733e-4ea4-aca9-0c7335940ce7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.725927] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1194.726236] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1194.726434] env[68217]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Deleting the datastore file [datastore2] 815d1801-fa07-4466-850d-b1a36d630d46 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1194.726710] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5f8af12b-7eb0-4f8d-8e6a-b270db09141b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.739600] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52cec95b-40cb-2020-a785-0f9c9e32e477, 'name': SearchDatastore_Task, 'duration_secs': 0.013913} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1194.741043] env[68217]: DEBUG oslo_concurrency.lockutils [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1194.741355] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1194.741618] env[68217]: DEBUG oslo_concurrency.lockutils [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1194.741804] env[68217]: DEBUG oslo_concurrency.lockutils [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1194.742030] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1194.742362] env[68217]: DEBUG oslo_vmware.api [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for the task: (returnval){ [ 1194.742362] env[68217]: value = "task-2962174" [ 1194.742362] env[68217]: _type = "Task" [ 1194.742362] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.742593] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-44ddbfa9-803e-4259-ad99-f3634f5f975e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.753238] env[68217]: DEBUG oslo_vmware.api [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962174, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1194.766254] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1194.766452] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1194.767312] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddcd8fcc-53fc-4eaf-bc9c-c056d6e72da5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1194.773346] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Waiting for the task: (returnval){ [ 1194.773346] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52cab4bb-2219-283c-08ca-181cb8775928" [ 1194.773346] env[68217]: _type = "Task" [ 1194.773346] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1194.782468] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52cab4bb-2219-283c-08ca-181cb8775928, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.002776] env[68217]: DEBUG oslo_concurrency.lockutils [req-f56cfaea-d90f-4280-8d49-141a8add7b37 req-55b8dcb1-4b46-4b52-adc3-508101ad6fe3 service nova] Releasing lock "refresh_cache-22c8918b-c67e-467c-8aea-7dff71a8d266" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.037961] env[68217]: DEBUG oslo_vmware.api [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962170, 'name': PowerOnVM_Task, 'duration_secs': 0.518621} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.038240] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1195.038441] env[68217]: INFO nova.compute.manager [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Took 8.01 seconds to spawn the instance on the hypervisor. [ 1195.038622] env[68217]: DEBUG nova.compute.manager [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1195.039462] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a98318e-68ab-4716-b678-ca51691fcda2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.100966] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522228f9-0e7e-1d88-7e13-7687d991d129, 'name': SearchDatastore_Task, 'duration_secs': 0.023159} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.101716] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5ad9a349-74ab-46c3-b244-07c358b6ac7c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.107614] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1195.107614] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ccea9b-292d-aed7-6a32-f965ca6b5995" [ 1195.107614] env[68217]: _type = "Task" [ 1195.107614] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.115212] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ccea9b-292d-aed7-6a32-f965ca6b5995, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.254540] env[68217]: DEBUG oslo_vmware.api [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962174, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.282671] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52cab4bb-2219-283c-08ca-181cb8775928, 'name': SearchDatastore_Task, 'duration_secs': 0.023925} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.283562] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2483f665-c948-45df-8e7a-51fd854a6286 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.289532] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Waiting for the task: (returnval){ [ 1195.289532] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d41cfd-e1c5-d44a-8fe9-deb90135914a" [ 1195.289532] env[68217]: _type = "Task" [ 1195.289532] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.297031] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d41cfd-e1c5-d44a-8fe9-deb90135914a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.448471] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23829036-2720-470c-9046-8ecbe717263f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.456621] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d5330a-ad6c-4c61-aa8b-f472527be744 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.488898] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3df9ca7d-5ab9-422d-85bf-3ad4d577ec7f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.497154] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-404aa33c-b390-4915-abaa-3452bb4b429e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.513701] env[68217]: DEBUG nova.compute.provider_tree [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1195.557538] env[68217]: INFO nova.compute.manager [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 
5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Took 14.14 seconds to build instance. [ 1195.618352] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ccea9b-292d-aed7-6a32-f965ca6b5995, 'name': SearchDatastore_Task, 'duration_secs': 0.037011} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.618618] env[68217]: DEBUG oslo_concurrency.lockutils [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.618901] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] c9601da9-f07c-4cea-9a40-0b1bca35a17a/c9601da9-f07c-4cea-9a40-0b1bca35a17a.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1195.619185] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81ebacf4-b35c-4493-9dec-8133781ac5cc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.626162] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1195.626162] env[68217]: value = "task-2962175" [ 1195.626162] env[68217]: _type = "Task" [ 1195.626162] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.634028] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962175, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.756187] env[68217]: DEBUG oslo_vmware.api [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Task: {'id': task-2962174, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.606219} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.756447] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1195.756633] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1195.756815] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1195.757018] env[68217]: INFO nova.compute.manager [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Took 1.63 seconds to destroy the instance on the hypervisor. [ 1195.757280] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1195.757474] env[68217]: DEBUG nova.compute.manager [-] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1195.757570] env[68217]: DEBUG nova.network.neutron [-] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1195.800869] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d41cfd-e1c5-d44a-8fe9-deb90135914a, 'name': SearchDatastore_Task, 'duration_secs': 0.043789} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1195.802990] env[68217]: DEBUG oslo_concurrency.lockutils [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1195.803272] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 22c8918b-c67e-467c-8aea-7dff71a8d266/22c8918b-c67e-467c-8aea-7dff71a8d266.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1195.803538] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-863c12e1-712f-487f-9e0e-f27af9c8a30d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1195.810421] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Waiting for the task: (returnval){ [ 1195.810421] env[68217]: value = "task-2962176" [ 1195.810421] env[68217]: _type = "Task" [ 1195.810421] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1195.819292] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962176, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1195.964837] env[68217]: DEBUG nova.compute.manager [req-8c8b7852-0c6e-41a0-a854-f3d3f5c21d64 req-dd2831ab-32e9-41ab-a66a-d0996d20514a service nova] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Received event network-changed-ab455e1f-1232-4fd4-a71b-b73ce15172ff {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1195.964837] env[68217]: DEBUG nova.compute.manager [req-8c8b7852-0c6e-41a0-a854-f3d3f5c21d64 req-dd2831ab-32e9-41ab-a66a-d0996d20514a service nova] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Refreshing instance network info cache due to event network-changed-ab455e1f-1232-4fd4-a71b-b73ce15172ff. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1195.965118] env[68217]: DEBUG oslo_concurrency.lockutils [req-8c8b7852-0c6e-41a0-a854-f3d3f5c21d64 req-dd2831ab-32e9-41ab-a66a-d0996d20514a service nova] Acquiring lock "refresh_cache-5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1195.965296] env[68217]: DEBUG oslo_concurrency.lockutils [req-8c8b7852-0c6e-41a0-a854-f3d3f5c21d64 req-dd2831ab-32e9-41ab-a66a-d0996d20514a service nova] Acquired lock "refresh_cache-5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1195.965483] env[68217]: DEBUG nova.network.neutron [req-8c8b7852-0c6e-41a0-a854-f3d3f5c21d64 req-dd2831ab-32e9-41ab-a66a-d0996d20514a service nova] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Refreshing network info cache for port ab455e1f-1232-4fd4-a71b-b73ce15172ff {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1196.016311] env[68217]: DEBUG nova.scheduler.client.report [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1196.059418] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573559e4-b96c-4de7-b28a-0f878a5c989d tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 15.649s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.139195] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962175, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.288075] env[68217]: DEBUG nova.compute.manager [req-b4cabe53-9db9-4f06-bac5-ffe821a2b931 req-234ac488-a050-4f66-a385-efd927865bc6 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Received event network-vif-deleted-c0f23ace-2be2-4dca-b47a-a5b77ba68dd3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1196.288296] env[68217]: INFO nova.compute.manager [req-b4cabe53-9db9-4f06-bac5-ffe821a2b931 req-234ac488-a050-4f66-a385-efd927865bc6 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Neutron deleted interface c0f23ace-2be2-4dca-b47a-a5b77ba68dd3; detaching it from the instance and deleting it from the info cache [ 1196.288471] env[68217]: DEBUG nova.network.neutron [req-b4cabe53-9db9-4f06-bac5-ffe821a2b931 req-234ac488-a050-4f66-a385-efd927865bc6 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.325310] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962176, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.522767] env[68217]: DEBUG oslo_concurrency.lockutils [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.208s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1196.525452] env[68217]: DEBUG nova.compute.manager [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1196.636728] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962175, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.805411} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.636981] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] c9601da9-f07c-4cea-9a40-0b1bca35a17a/c9601da9-f07c-4cea-9a40-0b1bca35a17a.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1196.637217] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1196.639673] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-098f983c-8498-4da1-a06b-eecab1730b96 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.646582] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1196.646582] env[68217]: value = "task-2962178" [ 1196.646582] env[68217]: _type = "Task" [ 1196.646582] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.654312] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962178, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1196.701102] env[68217]: DEBUG nova.network.neutron [req-8c8b7852-0c6e-41a0-a854-f3d3f5c21d64 req-dd2831ab-32e9-41ab-a66a-d0996d20514a service nova] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Updated VIF entry in instance network info cache for port ab455e1f-1232-4fd4-a71b-b73ce15172ff. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1196.701488] env[68217]: DEBUG nova.network.neutron [req-8c8b7852-0c6e-41a0-a854-f3d3f5c21d64 req-dd2831ab-32e9-41ab-a66a-d0996d20514a service nova] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Updating instance_info_cache with network_info: [{"id": "ab455e1f-1232-4fd4-a71b-b73ce15172ff", "address": "fa:16:3e:06:41:09", "network": {"id": "e7f56c12-ca87-40ad-b72d-955989c48237", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-417650994-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.209", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b3068bf39ee943f1bdf378f8b2a5c360", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "69e1a5c1-4ae7-409b-8de7-d401684ef60d", "external-id": "nsx-vlan-transportzone-740", "segmentation_id": 740, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapab455e1f-12", "ovs_interfaceid": "ab455e1f-1232-4fd4-a71b-b73ce15172ff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.760627] env[68217]: DEBUG nova.network.neutron [-] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1196.795199] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-968af87c-4eca-4fc3-a5f7-49831c4b5c78 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.806198] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-401f59c1-208a-4df7-81ef-cf5960b63864 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.826894] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962176, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.616647} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1196.827210] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 22c8918b-c67e-467c-8aea-7dff71a8d266/22c8918b-c67e-467c-8aea-7dff71a8d266.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1196.827421] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1196.827667] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8fc23139-c749-4d4f-8a3e-b859d5211b57 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1196.835505] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Waiting for the task: (returnval){ [ 1196.835505] env[68217]: value = "task-2962179" [ 1196.835505] env[68217]: _type = "Task" [ 1196.835505] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1196.844468] env[68217]: DEBUG nova.compute.manager [req-b4cabe53-9db9-4f06-bac5-ffe821a2b931 req-234ac488-a050-4f66-a385-efd927865bc6 service nova] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Detach interface failed, port_id=c0f23ace-2be2-4dca-b47a-a5b77ba68dd3, reason: Instance 815d1801-fa07-4466-850d-b1a36d630d46 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1196.854234] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962179, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.035044] env[68217]: DEBUG nova.compute.utils [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1197.035783] env[68217]: DEBUG nova.compute.manager [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Allocating IP information in the background. 
{{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1197.036386] env[68217]: DEBUG nova.network.neutron [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1197.093338] env[68217]: DEBUG nova.policy [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34286c10b8b242fb83eb4f1493b9477b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '90ad2b0a8a0743ca80a0685bf56e0446', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1197.159649] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962178, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.163886} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.159932] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1197.160708] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56867879-7cb4-46df-9393-e747700c6c62 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.180605] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Reconfiguring VM instance instance-00000079 to attach disk [datastore1] c9601da9-f07c-4cea-9a40-0b1bca35a17a/c9601da9-f07c-4cea-9a40-0b1bca35a17a.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1197.181439] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-16a7388d-317c-40fd-a55d-86e2f6397a7d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.202044] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1197.202044] env[68217]: value = "task-2962180" [ 1197.202044] env[68217]: _type = "Task" [ 1197.202044] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.205580] env[68217]: DEBUG oslo_concurrency.lockutils [req-8c8b7852-0c6e-41a0-a854-f3d3f5c21d64 req-dd2831ab-32e9-41ab-a66a-d0996d20514a service nova] Releasing lock "refresh_cache-5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1197.211311] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962180, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.264074] env[68217]: INFO nova.compute.manager [-] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Took 1.51 seconds to deallocate network for instance. [ 1197.349922] env[68217]: DEBUG nova.network.neutron [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Successfully created port: 15c3165f-90ba-4321-8d1f-aea389ccc77a {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1197.357575] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962179, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065775} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1197.357888] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1197.358769] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27e42454-437f-4a09-b4a0-9c248a6375eb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.383627] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Reconfiguring VM instance instance-00000078 to attach disk [datastore2] 22c8918b-c67e-467c-8aea-7dff71a8d266/22c8918b-c67e-467c-8aea-7dff71a8d266.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1197.384019] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e1d4e042-3f98-481f-aaba-949d6120c160 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.407157] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Waiting for the task: (returnval){ [ 1197.407157] env[68217]: value = "task-2962181" [ 1197.407157] env[68217]: _type = "Task" [ 1197.407157] env[68217]: } to 
complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.417856] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962181, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.539068] env[68217]: DEBUG nova.compute.manager [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1197.723348] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962180, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1197.771341] env[68217]: DEBUG oslo_concurrency.lockutils [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1197.771547] env[68217]: DEBUG oslo_concurrency.lockutils [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1197.771775] env[68217]: DEBUG nova.objects.instance [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lazy-loading 'resources' on Instance uuid 815d1801-fa07-4466-850d-b1a36d630d46 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1197.918761] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962181, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.219681] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962180, 'name': ReconfigVM_Task, 'duration_secs': 0.583362} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.219972] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Reconfigured VM instance instance-00000079 to attach disk [datastore1] c9601da9-f07c-4cea-9a40-0b1bca35a17a/c9601da9-f07c-4cea-9a40-0b1bca35a17a.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1198.220626] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8abed5a-70f0-4642-8438-1dd777d61547 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.227532] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1198.227532] env[68217]: value = "task-2962182" [ 1198.227532] env[68217]: _type = "Task" [ 1198.227532] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.235732] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962182, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.392913] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76593767-0066-4740-abe0-86b2dd532483 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.404444] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b3b14b1-e411-4e55-bd2c-8e5e33e947c8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.420341] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962181, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.445722] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f4bcb7-0684-4ce1-88d2-5f8549431f8d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.458787] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61c64f1c-3921-49c8-a949-a1be7a7d90a5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.472182] env[68217]: DEBUG nova.compute.provider_tree [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1198.551617] env[68217]: DEBUG nova.compute.manager [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1198.580652] env[68217]: DEBUG nova.virt.hardware [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1198.580930] env[68217]: DEBUG nova.virt.hardware [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1198.581140] env[68217]: DEBUG nova.virt.hardware [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1198.581392] env[68217]: DEBUG nova.virt.hardware [None 
req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1198.581577] env[68217]: DEBUG nova.virt.hardware [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1198.581781] env[68217]: DEBUG nova.virt.hardware [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1198.582068] env[68217]: DEBUG nova.virt.hardware [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1198.582278] env[68217]: DEBUG nova.virt.hardware [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1198.582512] env[68217]: DEBUG nova.virt.hardware [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1198.582684] env[68217]: DEBUG nova.virt.hardware [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1198.582899] env[68217]: DEBUG nova.virt.hardware [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1198.583885] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f80d1093-b194-4786-a44e-b66ec55867eb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.593507] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd2cc4c9-eab0-48f1-ba6a-d8d6eaaa7d19 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.750411] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962182, 'name': Rename_Task, 'duration_secs': 0.371009} completed successfully. 
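[editor's sketch] The nova.virt.hardware entries above walk the CPU-topology selection for the 1-vCPU m1.nano flavor: with no flavor or image limits the maxima default to 65536 and the only topology whose product equals one vCPU is 1 socket / 1 core / 1 thread. A simplified sketch of that enumeration (not the Nova code):

from itertools import product

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
    # Only divisors of vcpus can appear, so the search space stays tiny.
    divisors = [d for d in range(1, vcpus + 1) if vcpus % d == 0]
    topologies = []
    for sockets, cores, threads in product(divisors, repeat=3):
        if (sockets * cores * threads == vcpus
                and sockets <= max_sockets
                and cores <= max_cores
                and threads <= max_threads):
            topologies.append((sockets, cores, threads))
    return topologies

# For the m1.nano flavor in the log (1 vCPU) this yields a single candidate,
# matching "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]".
print(possible_topologies(1))   # [(1, 1, 1)]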
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.751574] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1198.752408] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-62cfe451-1f09-4f4e-8db0-8660c8d3ca9c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.766537] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1198.766537] env[68217]: value = "task-2962184" [ 1198.766537] env[68217]: _type = "Task" [ 1198.766537] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.785596] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962184, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.788063] env[68217]: DEBUG nova.compute.manager [req-1068f832-6882-4521-b6c3-07b0a1829969 req-1b630636-d152-4b98-b0f8-b732671b3225 service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Received event network-vif-plugged-15c3165f-90ba-4321-8d1f-aea389ccc77a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1198.788314] env[68217]: DEBUG oslo_concurrency.lockutils [req-1068f832-6882-4521-b6c3-07b0a1829969 req-1b630636-d152-4b98-b0f8-b732671b3225 service nova] Acquiring lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1198.788796] env[68217]: DEBUG oslo_concurrency.lockutils [req-1068f832-6882-4521-b6c3-07b0a1829969 req-1b630636-d152-4b98-b0f8-b732671b3225 service nova] Lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1198.788796] env[68217]: DEBUG oslo_concurrency.lockutils [req-1068f832-6882-4521-b6c3-07b0a1829969 req-1b630636-d152-4b98-b0f8-b732671b3225 service nova] Lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1198.789038] env[68217]: DEBUG nova.compute.manager [req-1068f832-6882-4521-b6c3-07b0a1829969 req-1b630636-d152-4b98-b0f8-b732671b3225 service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] No waiting events found dispatching network-vif-plugged-15c3165f-90ba-4321-8d1f-aea389ccc77a {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1198.789174] env[68217]: WARNING 
nova.compute.manager [req-1068f832-6882-4521-b6c3-07b0a1829969 req-1b630636-d152-4b98-b0f8-b732671b3225 service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Received unexpected event network-vif-plugged-15c3165f-90ba-4321-8d1f-aea389ccc77a for instance with vm_state building and task_state spawning. [ 1198.879280] env[68217]: DEBUG nova.network.neutron [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Successfully updated port: 15c3165f-90ba-4321-8d1f-aea389ccc77a {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1198.920090] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962181, 'name': ReconfigVM_Task, 'duration_secs': 1.054723} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.920403] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Reconfigured VM instance instance-00000078 to attach disk [datastore2] 22c8918b-c67e-467c-8aea-7dff71a8d266/22c8918b-c67e-467c-8aea-7dff71a8d266.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1198.921038] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d8c9f8de-8e4e-4da1-b1e7-d6a6341d1b25 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.927491] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Waiting for the task: (returnval){ [ 1198.927491] env[68217]: value = "task-2962185" [ 1198.927491] env[68217]: _type = "Task" [ 1198.927491] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.936496] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962185, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.993330] env[68217]: ERROR nova.scheduler.client.report [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] [req-42ca37af-7751-48b6-9382-595bca153316] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-42ca37af-7751-48b6-9382-595bca153316"}]} [ 1199.009510] env[68217]: DEBUG nova.scheduler.client.report [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1199.023512] env[68217]: DEBUG nova.scheduler.client.report [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1199.023746] env[68217]: DEBUG nova.compute.provider_tree [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1199.034795] env[68217]: DEBUG nova.scheduler.client.report [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1199.052614] env[68217]: DEBUG nova.scheduler.client.report [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1199.181583] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc976afc-8c4a-4465-b947-e63cf6d2d554 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.189732] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f0eaf43b-7bce-4634-96b6-d68afbf4dc1c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.220042] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0228aa3-8cde-4b34-a4de-0e5c15842e63 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.228064] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c586b2b2-cc17-4fed-a8eb-f94cd879af46 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.241747] env[68217]: DEBUG nova.compute.provider_tree [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1199.276085] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962184, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.381924] env[68217]: DEBUG oslo_concurrency.lockutils [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1199.381924] env[68217]: DEBUG oslo_concurrency.lockutils [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1199.381924] env[68217]: DEBUG nova.network.neutron [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1199.438591] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962185, 'name': Rename_Task} progress is 14%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.772060] env[68217]: DEBUG nova.scheduler.client.report [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 165 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1199.772380] env[68217]: DEBUG nova.compute.provider_tree [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 165 to 166 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1199.772502] env[68217]: DEBUG nova.compute.provider_tree [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1199.781641] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962184, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1199.911812] env[68217]: DEBUG nova.network.neutron [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1199.940809] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962185, 'name': Rename_Task, 'duration_secs': 0.834954} completed successfully. 
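[editor's sketch] The inventory update above first fails with 409 "placement.concurrent_update", then the report client refreshes the provider and retries, bumping the generation from 165 to 166. That is optimistic concurrency against the Placement API: re-read the provider generation on conflict and resend. A hedged sketch of the retry loop, assuming a plain HTTP client and the standard inventories endpoint (adjust auth and URLs to the deployment):

import requests

def set_inventory_with_retry(base_url, headers, rp_uuid, inventories, max_attempts=3):
    """On a 409 generation conflict, re-read the provider generation and retry."""
    url = f"{base_url}/resource_providers/{rp_uuid}/inventories"
    for _ in range(max_attempts):
        current = requests.get(url, headers=headers).json()
        payload = {
            "resource_provider_generation": current["resource_provider_generation"],
            "inventories": inventories,
        }
        resp = requests.put(url, json=payload, headers=headers)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # Someone else updated the provider between our GET and PUT;
        # loop to fetch the new generation and try again.
    raise RuntimeError(f"could not update inventory for {rp_uuid}: repeated 409 conflicts")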
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1199.940809] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1199.941153] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4ad2ca7d-6e3f-451a-9d53-26e2d8b79411 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.948226] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Waiting for the task: (returnval){ [ 1199.948226] env[68217]: value = "task-2962186" [ 1199.948226] env[68217]: _type = "Task" [ 1199.948226] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1199.956319] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962186, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.054583] env[68217]: DEBUG nova.network.neutron [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Updating instance_info_cache with network_info: [{"id": "15c3165f-90ba-4321-8d1f-aea389ccc77a", "address": "fa:16:3e:ea:06:46", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15c3165f-90", "ovs_interfaceid": "15c3165f-90ba-4321-8d1f-aea389ccc77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1200.278424] env[68217]: DEBUG oslo_concurrency.lockutils [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.507s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.280363] env[68217]: DEBUG oslo_vmware.api [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962184, 'name': PowerOnVM_Task, 'duration_secs': 1.454531} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.280790] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1200.280996] env[68217]: INFO nova.compute.manager [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Took 6.84 seconds to spawn the instance on the hypervisor. [ 1200.281185] env[68217]: DEBUG nova.compute.manager [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1200.281946] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae9c451e-a827-4f15-95c5-d799bb2b7d94 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.295469] env[68217]: INFO nova.scheduler.client.report [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Deleted allocations for instance 815d1801-fa07-4466-850d-b1a36d630d46 [ 1200.458673] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962186, 'name': PowerOnVM_Task} progress is 87%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.558085] env[68217]: DEBUG oslo_concurrency.lockutils [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1200.558498] env[68217]: DEBUG nova.compute.manager [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Instance network_info: |[{"id": "15c3165f-90ba-4321-8d1f-aea389ccc77a", "address": "fa:16:3e:ea:06:46", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15c3165f-90", "ovs_interfaceid": "15c3165f-90ba-4321-8d1f-aea389ccc77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1200.558974] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:06:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15c3165f-90ba-4321-8d1f-aea389ccc77a', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1200.567319] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
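[editor's sketch] The "Instance VIF info" entry above is the neutron network_info record reduced to the handful of fields the vmwareapi driver needs to build the VM: the bridge name, the MAC, the NSX logical-switch id as an opaque network reference, the port id and the vif model. A simplified sketch of that reduction, based only on the structures visible in this log (not the driver code):

def vif_info_from_network_info(nw_info_entry, vif_model="vmxnet3"):
    """Reduce one neutron network_info entry (as logged above) to VIF info."""
    details = nw_info_entry.get("details", {})
    return {
        "network_name": nw_info_entry["network"]["bridge"],   # e.g. "br-int"
        "mac_address": nw_info_entry["address"],               # e.g. "fa:16:3e:ea:06:46"
        "network_ref": {
            "type": "OpaqueNetwork",
            "network-id": details.get("nsx-logical-switch-id"),
            "network-type": "nsx.LogicalSwitch",
            "use-external-id": True,
        },
        "iface_id": nw_info_entry["id"],                       # the neutron port UUID
        "vif_model": vif_model,
    }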
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1200.567555] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1200.567787] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f4b5c67-c49a-4dab-bb27-bf6ccecbdad9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.592767] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1200.592767] env[68217]: value = "task-2962187" [ 1200.592767] env[68217]: _type = "Task" [ 1200.592767] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1200.600559] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962187, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1200.798702] env[68217]: INFO nova.compute.manager [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Took 12.91 seconds to build instance. [ 1200.801892] env[68217]: DEBUG oslo_concurrency.lockutils [None req-370df269-fa69-49c0-a2c2-81c727612c0f tempest-AttachVolumeShelveTestJSON-1201743729 tempest-AttachVolumeShelveTestJSON-1201743729-project-member] Lock "815d1801-fa07-4466-850d-b1a36d630d46" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.181s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1200.818991] env[68217]: DEBUG nova.compute.manager [req-572060e0-5dd3-428f-8756-f3c7a711341f req-136d3bdd-e878-4da0-bc24-8d26ff9522ed service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Received event network-changed-15c3165f-90ba-4321-8d1f-aea389ccc77a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1200.819157] env[68217]: DEBUG nova.compute.manager [req-572060e0-5dd3-428f-8756-f3c7a711341f req-136d3bdd-e878-4da0-bc24-8d26ff9522ed service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Refreshing instance network info cache due to event network-changed-15c3165f-90ba-4321-8d1f-aea389ccc77a. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1200.819348] env[68217]: DEBUG oslo_concurrency.lockutils [req-572060e0-5dd3-428f-8756-f3c7a711341f req-136d3bdd-e878-4da0-bc24-8d26ff9522ed service nova] Acquiring lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1200.819498] env[68217]: DEBUG oslo_concurrency.lockutils [req-572060e0-5dd3-428f-8756-f3c7a711341f req-136d3bdd-e878-4da0-bc24-8d26ff9522ed service nova] Acquired lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1200.819755] env[68217]: DEBUG nova.network.neutron [req-572060e0-5dd3-428f-8756-f3c7a711341f req-136d3bdd-e878-4da0-bc24-8d26ff9522ed service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Refreshing network info cache for port 15c3165f-90ba-4321-8d1f-aea389ccc77a {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1200.963991] env[68217]: DEBUG oslo_vmware.api [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962186, 'name': PowerOnVM_Task, 'duration_secs': 0.576703} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1200.964288] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1200.964494] env[68217]: INFO nova.compute.manager [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Took 9.76 seconds to spawn the instance on the hypervisor. [ 1200.964672] env[68217]: DEBUG nova.compute.manager [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1200.965504] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c2ea36e-0f66-4e60-8a54-7ec7163561c9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.104492] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962187, 'name': CreateVM_Task, 'duration_secs': 0.342914} completed successfully. 
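[editor's sketch] The network-vif-plugged / network-changed entries show the compute manager either handing an external event to a registered waiter or, as in the WARNING above, noting it as unexpected because nothing was waiting for it. A tiny sketch of that "pop a waiter or report unexpected" behaviour, using a hypothetical in-memory registry:

import threading
from collections import defaultdict

class InstanceEvents:
    """Minimal per-instance event registry (illustrative only)."""
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)   # instance_uuid -> {event_name: Event}

    def prepare(self, instance_uuid, event_name):
        ev = threading.Event()
        with self._lock:
            self._waiters[instance_uuid][event_name] = ev
        return ev

    def dispatch(self, instance_uuid, event_name):
        with self._lock:
            ev = self._waiters.get(instance_uuid, {}).pop(event_name, None)
        if ev is None:
            # Matches the "Received unexpected event ..." warning in the log.
            print(f"unexpected event {event_name} for {instance_uuid}")
            return False
        ev.set()
        return True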
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.104700] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1201.105383] env[68217]: DEBUG oslo_concurrency.lockutils [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.105559] env[68217]: DEBUG oslo_concurrency.lockutils [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1201.105899] env[68217]: DEBUG oslo_concurrency.lockutils [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1201.106211] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-442129ce-e238-4116-bf83-0279c887e367 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.111599] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1201.111599] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e2ddbe-54fd-c5d0-7fbb-1034c92c066f" [ 1201.111599] env[68217]: _type = "Task" [ 1201.111599] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.120521] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e2ddbe-54fd-c5d0-7fbb-1034c92c066f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.302597] env[68217]: DEBUG oslo_concurrency.lockutils [None req-764f725d-e264-4c93-a363-446c84b2705c tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Lock "c9601da9-f07c-4cea-9a40-0b1bca35a17a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.426s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1201.486349] env[68217]: INFO nova.compute.manager [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Took 14.54 seconds to build instance. [ 1201.583449] env[68217]: DEBUG nova.network.neutron [req-572060e0-5dd3-428f-8756-f3c7a711341f req-136d3bdd-e878-4da0-bc24-8d26ff9522ed service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Updated VIF entry in instance network info cache for port 15c3165f-90ba-4321-8d1f-aea389ccc77a. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1201.583880] env[68217]: DEBUG nova.network.neutron [req-572060e0-5dd3-428f-8756-f3c7a711341f req-136d3bdd-e878-4da0-bc24-8d26ff9522ed service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Updating instance_info_cache with network_info: [{"id": "15c3165f-90ba-4321-8d1f-aea389ccc77a", "address": "fa:16:3e:ea:06:46", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15c3165f-90", "ovs_interfaceid": "15c3165f-90ba-4321-8d1f-aea389ccc77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.621927] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e2ddbe-54fd-c5d0-7fbb-1034c92c066f, 'name': SearchDatastore_Task, 'duration_secs': 0.011849} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1201.622239] env[68217]: DEBUG oslo_concurrency.lockutils [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1201.622468] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1201.622730] env[68217]: DEBUG oslo_concurrency.lockutils [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1201.622881] env[68217]: DEBUG oslo_concurrency.lockutils [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1201.623077] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1201.623336] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1ab8190-958a-4f10-8b65-b35c7aefac18 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.635257] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1201.635452] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1201.636217] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-db546dbc-b17d-4579-a2a5-842704709c25 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.642040] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1201.642040] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b1ade4-a4db-6974-f544-ebcbc382d3ae" [ 1201.642040] env[68217]: _type = "Task" [ 1201.642040] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.650268] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b1ade4-a4db-6974-f544-ebcbc382d3ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.988602] env[68217]: DEBUG oslo_concurrency.lockutils [None req-98465857-1332-4af0-a76d-283126e53a87 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Lock "22c8918b-c67e-467c-8aea-7dff71a8d266" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.056s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1202.086831] env[68217]: DEBUG oslo_concurrency.lockutils [req-572060e0-5dd3-428f-8756-f3c7a711341f req-136d3bdd-e878-4da0-bc24-8d26ff9522ed service nova] Releasing lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1202.153227] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b1ade4-a4db-6974-f544-ebcbc382d3ae, 'name': SearchDatastore_Task, 'duration_secs': 0.014668} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.153998] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d6f2096c-4e8c-4dd6-aaea-1fbef0b37c74 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.159599] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1202.159599] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]526e61c7-dd10-5da2-73cf-a0d72ac58df0" [ 1202.159599] env[68217]: _type = "Task" [ 1202.159599] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.167943] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526e61c7-dd10-5da2-73cf-a0d72ac58df0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.345909] env[68217]: INFO nova.compute.manager [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Rebuilding instance [ 1202.405174] env[68217]: DEBUG nova.compute.manager [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1202.406135] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e258fd3-2bcb-48e3-9ade-d6616f4cf305 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.670409] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526e61c7-dd10-5da2-73cf-a0d72ac58df0, 'name': SearchDatastore_Task, 'duration_secs': 0.010996} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.670679] env[68217]: DEBUG oslo_concurrency.lockutils [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1202.670936] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] b5fdce0e-465a-4cf0-9a15-313bba7a11e9/b5fdce0e-465a-4cf0-9a15-313bba7a11e9.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1202.671229] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a3bdd76d-9ed9-4eb9-be55-cf407efa2502 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.678379] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1202.678379] env[68217]: value = "task-2962190" [ 1202.678379] env[68217]: _type = "Task" [ 1202.678379] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.689369] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962190, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.852828] env[68217]: DEBUG nova.compute.manager [req-cee94acf-abe1-4cb7-8780-96cd795eed2c req-86114f05-1db1-4d85-ae08-8ba33ea5855b service nova] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Received event network-changed-306424b1-5a55-49af-a82a-c5cc49304899 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1202.852828] env[68217]: DEBUG nova.compute.manager [req-cee94acf-abe1-4cb7-8780-96cd795eed2c req-86114f05-1db1-4d85-ae08-8ba33ea5855b service nova] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Refreshing instance network info cache due to event network-changed-306424b1-5a55-49af-a82a-c5cc49304899. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1202.852828] env[68217]: DEBUG oslo_concurrency.lockutils [req-cee94acf-abe1-4cb7-8780-96cd795eed2c req-86114f05-1db1-4d85-ae08-8ba33ea5855b service nova] Acquiring lock "refresh_cache-22c8918b-c67e-467c-8aea-7dff71a8d266" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1202.853172] env[68217]: DEBUG oslo_concurrency.lockutils [req-cee94acf-abe1-4cb7-8780-96cd795eed2c req-86114f05-1db1-4d85-ae08-8ba33ea5855b service nova] Acquired lock "refresh_cache-22c8918b-c67e-467c-8aea-7dff71a8d266" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1202.854019] env[68217]: DEBUG nova.network.neutron [req-cee94acf-abe1-4cb7-8780-96cd795eed2c req-86114f05-1db1-4d85-ae08-8ba33ea5855b service nova] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Refreshing network info cache for port 306424b1-5a55-49af-a82a-c5cc49304899 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1203.189743] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962190, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.482092} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.190154] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] b5fdce0e-465a-4cf0-9a15-313bba7a11e9/b5fdce0e-465a-4cf0-9a15-313bba7a11e9.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1203.190238] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1203.190443] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5fea2b7f-a3eb-468f-8875-a8ce8ab6be58 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.196919] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1203.196919] env[68217]: value = "task-2962191" [ 1203.196919] env[68217]: _type = "Task" [ 1203.196919] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.206681] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962191, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.434410] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1203.434729] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6d6a2a7c-24e7-4d1a-9f09-c4a7635923aa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.444135] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1203.444135] env[68217]: value = "task-2962192" [ 1203.444135] env[68217]: _type = "Task" [ 1203.444135] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.454027] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962192, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.572493] env[68217]: DEBUG nova.network.neutron [req-cee94acf-abe1-4cb7-8780-96cd795eed2c req-86114f05-1db1-4d85-ae08-8ba33ea5855b service nova] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Updated VIF entry in instance network info cache for port 306424b1-5a55-49af-a82a-c5cc49304899. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1203.572854] env[68217]: DEBUG nova.network.neutron [req-cee94acf-abe1-4cb7-8780-96cd795eed2c req-86114f05-1db1-4d85-ae08-8ba33ea5855b service nova] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Updating instance_info_cache with network_info: [{"id": "306424b1-5a55-49af-a82a-c5cc49304899", "address": "fa:16:3e:1e:7b:4b", "network": {"id": "8b51e7d9-8ebe-4854-9b02-3b7f7afff71c", "bridge": "br-int", "label": "tempest-ServersTestFqdnHostnames-391603824-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.128", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b669701704154ef9b91f04d4d36c0564", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f0ef5aba-bd9a-42ff-a1a0-5e763986d70a", "external-id": "nsx-vlan-transportzone-209", "segmentation_id": 209, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap306424b1-5a", "ovs_interfaceid": "306424b1-5a55-49af-a82a-c5cc49304899", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1203.708956] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962191, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070423} completed successfully. 
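[editor's sketch] Taken together, the SearchDatastore / MakeDirectory / CopyVirtualDisk / ExtendVirtualDisk entries are the "reuse the cached image if present, clone it for the instance, grow it to the flavor root size" sequence; the 1048576 in the extend step is the 1 GiB root disk expressed in KB. A condensed sketch of that flow with injected placeholder operations (search_datastore, fetch_image, copy_disk, extend_disk are hypothetical, standing in for the vCenter tasks):

def prepare_root_disk(image_id, instance_uuid, root_gb,
                      search_datastore, fetch_image, copy_disk, extend_disk):
    """Sketch of the image-cache flow seen in the log."""
    cache_path = f"[datastore1] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    instance_path = f"[datastore1] {instance_uuid}/{instance_uuid}.vmdk"

    if not search_datastore(cache_path):
        # Only download from the image service when the cached copy is missing.
        fetch_image(image_id, cache_path)

    # Clone the cached sparse disk into the instance directory ...
    copy_disk(cache_path, instance_path)
    # ... then grow it to the flavor's root_gb (1 GiB -> 1048576 KB in this run).
    extend_disk(instance_path, root_gb * 1024 * 1024)  # size in KB
    return instance_path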
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.709211] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1203.710195] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bedc591f-16cb-4e96-80b0-e9a664a1f69f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.733265] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Reconfiguring VM instance instance-0000007a to attach disk [datastore1] b5fdce0e-465a-4cf0-9a15-313bba7a11e9/b5fdce0e-465a-4cf0-9a15-313bba7a11e9.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1203.733583] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d5f481f9-6106-48b8-a125-619d6c52a92f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.754215] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1203.754215] env[68217]: value = "task-2962193" [ 1203.754215] env[68217]: _type = "Task" [ 1203.754215] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1203.763689] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962193, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1203.954531] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962192, 'name': PowerOffVM_Task, 'duration_secs': 0.295831} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1203.954531] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1203.955023] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1203.955848] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be6bb909-ce66-4e73-806f-2ae3a8e3303b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.963322] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1203.963524] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dc2b20fb-8846-415f-953c-69ed367b3216 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1203.992299] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1203.992514] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1203.992695] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Deleting the datastore file [datastore1] c9601da9-f07c-4cea-9a40-0b1bca35a17a {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1203.992968] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-39cbfa67-3708-458c-a6ea-d936445b5575 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.000168] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1204.000168] env[68217]: value = "task-2962195" [ 1204.000168] env[68217]: _type = "Task" [ 1204.000168] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.009041] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962195, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.076409] env[68217]: DEBUG oslo_concurrency.lockutils [req-cee94acf-abe1-4cb7-8780-96cd795eed2c req-86114f05-1db1-4d85-ae08-8ba33ea5855b service nova] Releasing lock "refresh_cache-22c8918b-c67e-467c-8aea-7dff71a8d266" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1204.264708] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962193, 'name': ReconfigVM_Task, 'duration_secs': 0.348333} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.265092] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Reconfigured VM instance instance-0000007a to attach disk [datastore1] b5fdce0e-465a-4cf0-9a15-313bba7a11e9/b5fdce0e-465a-4cf0-9a15-313bba7a11e9.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1204.265632] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-613a0bc8-6fcf-4273-be5d-98ac29b1d0a8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1204.272505] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1204.272505] env[68217]: value = "task-2962196" [ 1204.272505] env[68217]: _type = "Task" [ 1204.272505] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1204.281659] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962196, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1204.510341] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962195, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.120406} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1204.510604] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1204.510788] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1204.510961] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1204.783120] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962196, 'name': Rename_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.283414] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962196, 'name': Rename_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.546007] env[68217]: DEBUG nova.virt.hardware [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1205.546267] env[68217]: DEBUG nova.virt.hardware [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1205.546422] env[68217]: DEBUG nova.virt.hardware [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:356}} [ 1205.546601] env[68217]: DEBUG nova.virt.hardware [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1205.546745] env[68217]: DEBUG nova.virt.hardware [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1205.546890] env[68217]: DEBUG nova.virt.hardware [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1205.547114] env[68217]: DEBUG nova.virt.hardware [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1205.547279] env[68217]: DEBUG nova.virt.hardware [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1205.547446] env[68217]: DEBUG nova.virt.hardware [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1205.547595] env[68217]: DEBUG nova.virt.hardware [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1205.547753] env[68217]: DEBUG nova.virt.hardware [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1205.548624] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac74545-8bf1-49c3-bd0b-b33346757757 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.556745] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6e88ab2-9df0-4818-aadd-2e8bf7739a23 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.570200] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: 
c9601da9-f07c-4cea-9a40-0b1bca35a17a] Instance VIF info [] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1205.575618] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1205.575872] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1205.576108] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6876eef5-110b-461f-aea6-c13bba14bf4e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.594182] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1205.594182] env[68217]: value = "task-2962197" [ 1205.594182] env[68217]: _type = "Task" [ 1205.594182] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.603419] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962197, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1205.784362] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962196, 'name': Rename_Task, 'duration_secs': 1.154339} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1205.784680] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1205.784950] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7b5a62f9-2baa-41d4-bd08-12c3dfb950c0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.792223] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1205.792223] env[68217]: value = "task-2962198" [ 1205.792223] env[68217]: _type = "Task" [ 1205.792223] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1205.808723] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962198, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.109103] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962197, 'name': CreateVM_Task, 'duration_secs': 0.263023} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.109361] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1206.110056] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.110056] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1206.110242] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1206.110448] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-63466b12-4fae-43b1-9a17-03179847496d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.119138] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1206.119138] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52bfac48-c899-b085-a5c7-0822ddb176e1" [ 1206.119138] env[68217]: _type = "Task" [ 1206.119138] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.127574] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52bfac48-c899-b085-a5c7-0822ddb176e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.304044] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962198, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.631222] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52bfac48-c899-b085-a5c7-0822ddb176e1, 'name': SearchDatastore_Task, 'duration_secs': 0.019117} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.631531] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1206.631765] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1206.632010] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1206.632171] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1206.632348] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1206.632616] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bb2899eb-4d2b-4231-988f-9600459e72e4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.644243] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1206.644445] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1206.645158] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dfa76959-a67b-4ef3-8617-b9edbc6672a1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1206.651887] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1206.651887] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529815c8-60d3-cd17-ba98-5a0f6fcdb48b" [ 1206.651887] env[68217]: _type = "Task" [ 1206.651887] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1206.662901] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529815c8-60d3-cd17-ba98-5a0f6fcdb48b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1206.803968] env[68217]: DEBUG oslo_vmware.api [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962198, 'name': PowerOnVM_Task, 'duration_secs': 0.546095} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1206.804265] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1206.804475] env[68217]: INFO nova.compute.manager [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Took 8.25 seconds to spawn the instance on the hypervisor. [ 1206.804653] env[68217]: DEBUG nova.compute.manager [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1206.805576] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06a17525-ee65-43e3-a120-ba83ec38a34e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.164413] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529815c8-60d3-cd17-ba98-5a0f6fcdb48b, 'name': SearchDatastore_Task, 'duration_secs': 0.011894} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.165248] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b68df5b-f365-4b63-89ce-0f6b869982ae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.172708] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1207.172708] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f21109-f4f0-c39c-294c-52ae61848a56" [ 1207.172708] env[68217]: _type = "Task" [ 1207.172708] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.182664] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f21109-f4f0-c39c-294c-52ae61848a56, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.332100] env[68217]: INFO nova.compute.manager [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Took 13.04 seconds to build instance. [ 1207.684713] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f21109-f4f0-c39c-294c-52ae61848a56, 'name': SearchDatastore_Task, 'duration_secs': 0.012634} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1207.685139] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1207.685482] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] c9601da9-f07c-4cea-9a40-0b1bca35a17a/c9601da9-f07c-4cea-9a40-0b1bca35a17a.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1207.685819] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ffeb14f6-76a5-4dac-bf13-ce5962bf8efc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1207.694647] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1207.694647] env[68217]: value = "task-2962200" [ 1207.694647] env[68217]: _type = "Task" [ 1207.694647] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1207.708273] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962200, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1207.743462] env[68217]: DEBUG nova.compute.manager [req-dbe207c2-ed76-4913-9b8e-9b33abbcb34d req-eb628f70-6a92-4da2-a70d-df5b9ceefa1d service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Received event network-changed-15c3165f-90ba-4321-8d1f-aea389ccc77a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1207.743713] env[68217]: DEBUG nova.compute.manager [req-dbe207c2-ed76-4913-9b8e-9b33abbcb34d req-eb628f70-6a92-4da2-a70d-df5b9ceefa1d service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Refreshing instance network info cache due to event network-changed-15c3165f-90ba-4321-8d1f-aea389ccc77a. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1207.743915] env[68217]: DEBUG oslo_concurrency.lockutils [req-dbe207c2-ed76-4913-9b8e-9b33abbcb34d req-eb628f70-6a92-4da2-a70d-df5b9ceefa1d service nova] Acquiring lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1207.744035] env[68217]: DEBUG oslo_concurrency.lockutils [req-dbe207c2-ed76-4913-9b8e-9b33abbcb34d req-eb628f70-6a92-4da2-a70d-df5b9ceefa1d service nova] Acquired lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1207.744207] env[68217]: DEBUG nova.network.neutron [req-dbe207c2-ed76-4913-9b8e-9b33abbcb34d req-eb628f70-6a92-4da2-a70d-df5b9ceefa1d service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Refreshing network info cache for port 15c3165f-90ba-4321-8d1f-aea389ccc77a {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1207.835029] env[68217]: DEBUG oslo_concurrency.lockutils [None req-752c7101-4139-4fdb-b764-ec3184b7ce5b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.556s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1208.206070] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962200, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.47525} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.206070] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] c9601da9-f07c-4cea-9a40-0b1bca35a17a/c9601da9-f07c-4cea-9a40-0b1bca35a17a.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1208.206070] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1208.206310] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-64efb06b-e5d6-4a2b-8fb0-5a13b861c056 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.214284] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1208.214284] env[68217]: value = "task-2962201" [ 1208.214284] env[68217]: _type = "Task" [ 1208.214284] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.223129] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962201, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.478953] env[68217]: DEBUG nova.network.neutron [req-dbe207c2-ed76-4913-9b8e-9b33abbcb34d req-eb628f70-6a92-4da2-a70d-df5b9ceefa1d service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Updated VIF entry in instance network info cache for port 15c3165f-90ba-4321-8d1f-aea389ccc77a. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1208.479375] env[68217]: DEBUG nova.network.neutron [req-dbe207c2-ed76-4913-9b8e-9b33abbcb34d req-eb628f70-6a92-4da2-a70d-df5b9ceefa1d service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Updating instance_info_cache with network_info: [{"id": "15c3165f-90ba-4321-8d1f-aea389ccc77a", "address": "fa:16:3e:ea:06:46", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15c3165f-90", "ovs_interfaceid": "15c3165f-90ba-4321-8d1f-aea389ccc77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1208.724379] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962201, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075421} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1208.724664] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1208.725469] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbc1c8ce-b182-44da-a58d-607f9b421bdf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.748142] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Reconfiguring VM instance instance-00000079 to attach disk [datastore2] c9601da9-f07c-4cea-9a40-0b1bca35a17a/c9601da9-f07c-4cea-9a40-0b1bca35a17a.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1208.748522] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fdd13228-389d-4a6e-82de-502b7b89371d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1208.777657] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1208.777657] env[68217]: value = "task-2962203" [ 1208.777657] env[68217]: _type = "Task" [ 1208.777657] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1208.787569] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962203, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1208.982954] env[68217]: DEBUG oslo_concurrency.lockutils [req-dbe207c2-ed76-4913-9b8e-9b33abbcb34d req-eb628f70-6a92-4da2-a70d-df5b9ceefa1d service nova] Releasing lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1209.287950] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962203, 'name': ReconfigVM_Task, 'duration_secs': 0.304095} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.289070] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Reconfigured VM instance instance-00000079 to attach disk [datastore2] c9601da9-f07c-4cea-9a40-0b1bca35a17a/c9601da9-f07c-4cea-9a40-0b1bca35a17a.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1209.289219] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ac438f92-4d3c-4681-98ca-549192230956 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.296292] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1209.296292] env[68217]: value = "task-2962204" [ 1209.296292] env[68217]: _type = "Task" [ 1209.296292] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.305281] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962204, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1209.807054] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962204, 'name': Rename_Task, 'duration_secs': 0.145389} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1209.807422] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1209.807575] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bd42a7fd-37b6-4973-918a-124fd35f8203 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1209.814980] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1209.814980] env[68217]: value = "task-2962205" [ 1209.814980] env[68217]: _type = "Task" [ 1209.814980] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1209.833543] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962205, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.324791] env[68217]: DEBUG oslo_vmware.api [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962205, 'name': PowerOnVM_Task, 'duration_secs': 0.490306} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1210.325064] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1210.325287] env[68217]: DEBUG nova.compute.manager [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1210.326080] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d1379f1-fa89-40b7-8fca-58c6342a36e7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.841962] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1210.842287] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1210.843942] env[68217]: DEBUG nova.objects.instance [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68217) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1211.176728] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Acquiring lock "c9601da9-f07c-4cea-9a40-0b1bca35a17a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.177118] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Lock "c9601da9-f07c-4cea-9a40-0b1bca35a17a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.177418] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Acquiring lock "c9601da9-f07c-4cea-9a40-0b1bca35a17a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1211.177699] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Lock "c9601da9-f07c-4cea-9a40-0b1bca35a17a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1211.177995] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Lock "c9601da9-f07c-4cea-9a40-0b1bca35a17a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1211.180361] env[68217]: INFO nova.compute.manager [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Terminating instance [ 1211.239577] env[68217]: INFO nova.compute.manager [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Rebuilding instance [ 1211.283427] env[68217]: DEBUG nova.compute.manager [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1211.284296] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cb94322-0eac-4df3-968a-0d51a14f7d7e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.683755] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Acquiring lock "refresh_cache-c9601da9-f07c-4cea-9a40-0b1bca35a17a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1211.683951] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Acquired lock "refresh_cache-c9601da9-f07c-4cea-9a40-0b1bca35a17a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1211.684116] env[68217]: DEBUG nova.network.neutron [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 
tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1211.851124] env[68217]: DEBUG oslo_concurrency.lockutils [None req-9845276e-6e4d-454b-ba67-e318bd5505a7 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1212.205959] env[68217]: DEBUG nova.network.neutron [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1212.262522] env[68217]: DEBUG nova.network.neutron [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1212.299279] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1212.299741] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e1dc0c5-e3a0-4102-9618-5dcdcae6816d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.307567] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1212.307567] env[68217]: value = "task-2962207" [ 1212.307567] env[68217]: _type = "Task" [ 1212.307567] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.315537] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962207, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.765320] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Releasing lock "refresh_cache-c9601da9-f07c-4cea-9a40-0b1bca35a17a" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1212.765835] env[68217]: DEBUG nova.compute.manager [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1212.766077] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1212.767122] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-045296fe-2fd8-42eb-890d-a9ffaff7ac07 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.776304] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1212.776470] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-403b9739-c809-42c5-97bc-030a2af37b18 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.784039] env[68217]: DEBUG oslo_vmware.api [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1212.784039] env[68217]: value = "task-2962208" [ 1212.784039] env[68217]: _type = "Task" [ 1212.784039] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.793391] env[68217]: DEBUG oslo_vmware.api [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962208, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.817529] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962207, 'name': PowerOffVM_Task, 'duration_secs': 0.250079} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.817789] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1212.818080] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1212.818832] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0a8dde-cde3-49e9-97a2-249860225672 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.825714] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1212.825968] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2ec8354a-45eb-4619-a5a6-e2812ebce1de {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.907092] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1212.907468] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1212.907545] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Deleting the datastore file [datastore1] 9844e40f-29ed-48b9-a48f-85fbe10ae2fb {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1212.907856] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-054871d8-be58-45a3-a200-384a67593740 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1212.916476] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1212.916476] env[68217]: value = "task-2962210" [ 1212.916476] env[68217]: _type = "Task" [ 1212.916476] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1212.925677] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962210, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.295046] env[68217]: DEBUG oslo_vmware.api [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962208, 'name': PowerOffVM_Task, 'duration_secs': 0.199353} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.295046] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1213.295278] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1213.295472] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ee001a70-f444-48bd-8845-0e8fed2558ad {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.323682] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1213.323914] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1213.324212] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Deleting the datastore file [datastore2] c9601da9-f07c-4cea-9a40-0b1bca35a17a {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1213.324575] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d4531467-8c99-46dc-a220-9f65e3f7a814 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1213.334135] env[68217]: DEBUG oslo_vmware.api [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for the task: (returnval){ [ 1213.334135] env[68217]: value = "task-2962212" [ 1213.334135] env[68217]: _type = "Task" [ 
1213.334135] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1213.342852] env[68217]: DEBUG oslo_vmware.api [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962212, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1213.427292] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962210, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.18095} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.427635] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1213.427828] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1213.428083] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1213.844026] env[68217]: DEBUG oslo_vmware.api [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Task: {'id': task-2962212, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09201} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1213.844298] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1213.844492] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1213.844670] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1213.844851] env[68217]: INFO nova.compute.manager [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Took 1.08 seconds to destroy the instance on the hypervisor. [ 1213.845104] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1213.845307] env[68217]: DEBUG nova.compute.manager [-] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1213.845406] env[68217]: DEBUG nova.network.neutron [-] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1213.861315] env[68217]: DEBUG nova.network.neutron [-] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1214.066584] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "e11b2312-4cc2-4b49-bd26-22fd5629669d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1214.066910] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "e11b2312-4cc2-4b49-bd26-22fd5629669d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1214.364071] env[68217]: DEBUG nova.network.neutron [-] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1214.463154] env[68217]: DEBUG nova.virt.hardware [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1214.463407] env[68217]: DEBUG nova.virt.hardware [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1214.463565] env[68217]: DEBUG nova.virt.hardware [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1214.463742] env[68217]: DEBUG nova.virt.hardware [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1214.463885] env[68217]: DEBUG nova.virt.hardware [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:396}} [ 1214.464046] env[68217]: DEBUG nova.virt.hardware [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1214.464266] env[68217]: DEBUG nova.virt.hardware [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1214.464425] env[68217]: DEBUG nova.virt.hardware [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1214.464589] env[68217]: DEBUG nova.virt.hardware [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1214.464750] env[68217]: DEBUG nova.virt.hardware [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1214.464922] env[68217]: DEBUG nova.virt.hardware [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1214.465787] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a12ec80-f6b6-4134-b64a-44c3062d33d7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.473805] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-819b6c8e-8cdf-4cab-b743-9c4db5bcbc0a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.486943] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:32:b6:1d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f78b07ea-f425-4622-84f4-706a5d8820a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4873ab51-6a06-44e0-a653-3dfbaa42a0d1', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1214.494125] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 
tempest-ServerActionsTestJSON-434688944-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1214.494345] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1214.494538] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b721f098-aaa5-428a-b80f-b8128045db3a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.514475] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1214.514475] env[68217]: value = "task-2962213" [ 1214.514475] env[68217]: _type = "Task" [ 1214.514475] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.523778] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962213, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1214.569607] env[68217]: DEBUG nova.compute.manager [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1214.867029] env[68217]: INFO nova.compute.manager [-] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Took 1.02 seconds to deallocate network for instance. [ 1215.025206] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962213, 'name': CreateVM_Task, 'duration_secs': 0.304774} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.025388] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1215.026120] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.026293] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1215.026619] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1215.026868] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2773415c-1ec6-4c43-ac87-c569628a6590 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.031412] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1215.031412] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52cbf675-43a0-f6c4-fb1a-dcabbbcbfd24" [ 1215.031412] env[68217]: _type = "Task" [ 1215.031412] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.039110] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52cbf675-43a0-f6c4-fb1a-dcabbbcbfd24, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.091144] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1215.091413] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1215.092861] env[68217]: INFO nova.compute.claims [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1215.374224] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1215.542232] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52cbf675-43a0-f6c4-fb1a-dcabbbcbfd24, 'name': SearchDatastore_Task, 'duration_secs': 0.01095} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.542526] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1215.542764] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1215.542998] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1215.543165] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1215.543342] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1215.543626] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7041836a-653f-4310-8329-de1044176918 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.551892] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1215.552074] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1215.552748] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b522013-38e1-4579-8c02-8875ce5e15e9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.557579] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1215.557579] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]527011ff-b019-6100-c6b3-924f5e602674" [ 1215.557579] env[68217]: _type = "Task" [ 1215.557579] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.565398] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527011ff-b019-6100-c6b3-924f5e602674, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.068479] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]527011ff-b019-6100-c6b3-924f5e602674, 'name': SearchDatastore_Task, 'duration_secs': 0.008286} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.069295] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b7d180b3-24e4-4114-a9e2-fa6d47483210 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.075518] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1216.075518] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]521bc80b-6f0b-cf86-fd78-48a2c1e7b1f8" [ 1216.075518] env[68217]: _type = "Task" [ 1216.075518] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.083484] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521bc80b-6f0b-cf86-fd78-48a2c1e7b1f8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.225216] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c2c3cb1-e8ca-4b5a-8698-d89878292d21 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.234248] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a195142-919d-4e22-bc34-a5b404844c52 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.264989] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ebfe235-7936-4efd-ad80-2015230d2b6d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.273023] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e704191c-8329-4baf-bd16-ba1e684095ab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.287822] env[68217]: DEBUG nova.compute.provider_tree [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1216.586308] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521bc80b-6f0b-cf86-fd78-48a2c1e7b1f8, 'name': SearchDatastore_Task, 'duration_secs': 0.010527} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1216.586516] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1216.586777] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 9844e40f-29ed-48b9-a48f-85fbe10ae2fb/9844e40f-29ed-48b9-a48f-85fbe10ae2fb.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1216.587046] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cd63b55b-6383-4e74-8568-0c3fdee0e727 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1216.593871] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1216.593871] env[68217]: value = "task-2962214" [ 1216.593871] env[68217]: _type = "Task" [ 1216.593871] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1216.601747] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962214, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1216.809689] env[68217]: ERROR nova.scheduler.client.report [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [req-71acc3fb-be56-4daa-abe3-656e0c8c9a49] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-71acc3fb-be56-4daa-abe3-656e0c8c9a49"}]} [ 1216.826882] env[68217]: DEBUG nova.scheduler.client.report [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1216.840977] env[68217]: DEBUG nova.scheduler.client.report [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1216.841235] env[68217]: DEBUG nova.compute.provider_tree [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1216.856031] env[68217]: DEBUG nova.scheduler.client.report [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1216.876843] env[68217]: DEBUG nova.scheduler.client.report [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1217.022341] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa8f54a2-be2c-4755-8701-f22440201619 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.030762] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66e674f-4887-40c6-ae60-7b8bb7366b23 
{{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.061547] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee2c693-4b89-43ae-9405-fda064900ad6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.069058] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f8b431a-11a6-4ff5-a7e5-f99224d55479 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.082124] env[68217]: DEBUG nova.compute.provider_tree [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1217.102101] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962214, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.442613} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.102351] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 9844e40f-29ed-48b9-a48f-85fbe10ae2fb/9844e40f-29ed-48b9-a48f-85fbe10ae2fb.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1217.102556] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1217.102778] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a8d8fcee-126a-404a-aba0-60472cfc4de5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.109221] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1217.109221] env[68217]: value = "task-2962215" [ 1217.109221] env[68217]: _type = "Task" [ 1217.109221] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.118238] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962215, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1217.585804] env[68217]: DEBUG nova.scheduler.client.report [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1217.619601] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962215, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.0633} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1217.619828] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1217.620551] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645dc7ff-1687-4e7a-a84d-9009ea1fa8be {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.642118] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Reconfiguring VM instance instance-00000075 to attach disk [datastore2] 9844e40f-29ed-48b9-a48f-85fbe10ae2fb/9844e40f-29ed-48b9-a48f-85fbe10ae2fb.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1217.642337] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-540a47c5-be68-486f-a6fd-3b07e28690d3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1217.661342] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1217.661342] env[68217]: value = "task-2962216" [ 1217.661342] env[68217]: _type = "Task" [ 1217.661342] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1217.669466] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962216, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.090666] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.999s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1218.091143] env[68217]: DEBUG nova.compute.manager [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1218.093893] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.720s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1218.094178] env[68217]: DEBUG nova.objects.instance [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Lazy-loading 'resources' on Instance uuid c9601da9-f07c-4cea-9a40-0b1bca35a17a {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1218.172419] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962216, 'name': ReconfigVM_Task, 'duration_secs': 0.256528} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.172739] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Reconfigured VM instance instance-00000075 to attach disk [datastore2] 9844e40f-29ed-48b9-a48f-85fbe10ae2fb/9844e40f-29ed-48b9-a48f-85fbe10ae2fb.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1218.173377] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3c9cf106-0cd6-4a64-ac98-a5abffd2636f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.180343] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1218.180343] env[68217]: value = "task-2962217" [ 1218.180343] env[68217]: _type = "Task" [ 1218.180343] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.187999] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962217, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.597054] env[68217]: DEBUG nova.compute.utils [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1218.601153] env[68217]: DEBUG nova.compute.manager [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1218.601325] env[68217]: DEBUG nova.network.neutron [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1218.645603] env[68217]: DEBUG nova.policy [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '0939a9bd52d142818e49fbf0c576e4a4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd093c295105c44cca8bd67bd514429d1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1218.693874] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962217, 'name': Rename_Task, 'duration_secs': 0.136737} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.696264] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1218.696706] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4500bf33-9d71-41d2-8f28-6d8937afdca0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.703881] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1218.703881] env[68217]: value = "task-2962218" [ 1218.703881] env[68217]: _type = "Task" [ 1218.703881] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.716544] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962218, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.734370] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c982616b-090a-4eab-a264-27ea1f2c115a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.742449] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fde7f00-fb78-4beb-863d-3a3536868b8e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.777381] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5793e397-39b8-4aad-838b-de3f1afa57a4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.786144] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1375fa7-f658-465a-934e-5537becceef8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.800395] env[68217]: DEBUG nova.compute.provider_tree [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1218.942232] env[68217]: DEBUG nova.network.neutron [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Successfully created port: 0680c857-993e-4996-8477-cfaa0a5727ad {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1219.101712] env[68217]: DEBUG nova.compute.manager [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 
tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1219.214618] env[68217]: DEBUG oslo_vmware.api [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962218, 'name': PowerOnVM_Task, 'duration_secs': 0.437263} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.214887] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1219.215104] env[68217]: DEBUG nova.compute.manager [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1219.215971] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c8969f6-0c06-42ff-a2f1-d8f1d6a79b8e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.302996] env[68217]: DEBUG nova.scheduler.client.report [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1219.607510] env[68217]: INFO nova.virt.block_device [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Booting with volume 5c535a76-9a0d-422d-88d0-9feb0c1b7b55 at /dev/sda [ 1219.642958] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-48c0990c-4595-4786-a80f-d3cdb8b55b2f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.653877] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc238f14-7463-4cec-b805-8426e6cc0a46 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.685174] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-52d73394-92f4-495e-8652-9fdb21eadbf2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.694120] env[68217]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2844a7be-19ab-4837-90c5-0ab0fc2e550f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.724384] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e22333e-a4ba-49d4-8f24-6b40867fefa6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.732551] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1219.734702] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53ab536e-7436-4003-9a23-ccff4ebc3eed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1219.749090] env[68217]: DEBUG nova.virt.block_device [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updating existing volume attachment record: da0b9dde-db13-4e32-b169-7cb4df95820c {{(pid=68217) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1219.807356] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.713s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1219.809674] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.078s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1219.809866] env[68217]: DEBUG nova.objects.instance [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68217) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1219.831254] env[68217]: INFO nova.scheduler.client.report [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Deleted allocations for instance c9601da9-f07c-4cea-9a40-0b1bca35a17a [ 1220.300143] env[68217]: DEBUG nova.compute.manager [req-aa2b5224-5c52-41fc-82d3-d3f854b9f749 req-b2cfea72-9c3a-4be1-b0a8-56d2e643ff8c service nova] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Received event network-vif-plugged-0680c857-993e-4996-8477-cfaa0a5727ad {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1220.300368] env[68217]: DEBUG 
oslo_concurrency.lockutils [req-aa2b5224-5c52-41fc-82d3-d3f854b9f749 req-b2cfea72-9c3a-4be1-b0a8-56d2e643ff8c service nova] Acquiring lock "e11b2312-4cc2-4b49-bd26-22fd5629669d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1220.300576] env[68217]: DEBUG oslo_concurrency.lockutils [req-aa2b5224-5c52-41fc-82d3-d3f854b9f749 req-b2cfea72-9c3a-4be1-b0a8-56d2e643ff8c service nova] Lock "e11b2312-4cc2-4b49-bd26-22fd5629669d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1220.300747] env[68217]: DEBUG oslo_concurrency.lockutils [req-aa2b5224-5c52-41fc-82d3-d3f854b9f749 req-b2cfea72-9c3a-4be1-b0a8-56d2e643ff8c service nova] Lock "e11b2312-4cc2-4b49-bd26-22fd5629669d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.300916] env[68217]: DEBUG nova.compute.manager [req-aa2b5224-5c52-41fc-82d3-d3f854b9f749 req-b2cfea72-9c3a-4be1-b0a8-56d2e643ff8c service nova] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] No waiting events found dispatching network-vif-plugged-0680c857-993e-4996-8477-cfaa0a5727ad {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1220.301094] env[68217]: WARNING nova.compute.manager [req-aa2b5224-5c52-41fc-82d3-d3f854b9f749 req-b2cfea72-9c3a-4be1-b0a8-56d2e643ff8c service nova] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Received unexpected event network-vif-plugged-0680c857-993e-4996-8477-cfaa0a5727ad for instance with vm_state building and task_state block_device_mapping. 
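The lockutils entries just above trace the external-event dispatch pattern: the compute manager takes a per-instance named lock ("e11b2312-...-events") around the event pop, and because nothing had registered a waiter for network-vif-plugged yet (the instance was still in task_state block_device_mapping), the "Received unexpected event" warning follows. Below is a minimal sketch of that per-instance locking pattern using oslo.concurrency directly; only lockutils.lock() is the real library API, while pop_waiting_event and the waiting_events dict are hypothetical stand-ins, not Nova's actual implementation.

# Hypothetical sketch of the per-instance event lock seen in the log above.
# lockutils.lock() is the real oslo.concurrency API; the rest is illustrative.
from oslo_concurrency import lockutils

def pop_waiting_event(instance_uuid, waiting_events, event_name):
    """Pop a registered waiter for event_name under the "<uuid>-events" lock."""
    with lockutils.lock(f"{instance_uuid}-events"):
        waiter = waiting_events.get(instance_uuid, {}).pop(event_name, None)
    if waiter is None:
        # No waiter was registered, so the caller would log the
        # "Received unexpected event ..." warning seen in the trace above.
        return None
    return waiter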
[ 1220.338269] env[68217]: DEBUG oslo_concurrency.lockutils [None req-91121269-bc62-4f9a-9471-0a396cdab947 tempest-ServersListShow298Test-462947744 tempest-ServersListShow298Test-462947744-project-member] Lock "c9601da9-f07c-4cea-9a40-0b1bca35a17a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.161s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.384756] env[68217]: DEBUG nova.network.neutron [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Successfully updated port: 0680c857-993e-4996-8477-cfaa0a5727ad {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1220.819733] env[68217]: DEBUG oslo_concurrency.lockutils [None req-1ab1ae03-bf5d-414e-91fd-4f8491e1230d tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.009s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1220.887631] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "refresh_cache-e11b2312-4cc2-4b49-bd26-22fd5629669d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1220.887797] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "refresh_cache-e11b2312-4cc2-4b49-bd26-22fd5629669d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1220.887952] env[68217]: DEBUG nova.network.neutron [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1221.417789] env[68217]: DEBUG nova.network.neutron [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1221.548261] env[68217]: DEBUG nova.network.neutron [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updating instance_info_cache with network_info: [{"id": "0680c857-993e-4996-8477-cfaa0a5727ad", "address": "fa:16:3e:9d:49:70", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0680c857-99", "ovs_interfaceid": "0680c857-993e-4996-8477-cfaa0a5727ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1221.835106] env[68217]: DEBUG nova.compute.manager [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1221.835715] env[68217]: DEBUG nova.virt.hardware [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1221.836022] env[68217]: DEBUG nova.virt.hardware [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1221.836200] env[68217]: DEBUG nova.virt.hardware [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1221.836466] env[68217]: DEBUG nova.virt.hardware [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1221.836557] env[68217]: DEBUG nova.virt.hardware [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1221.836721] env[68217]: DEBUG nova.virt.hardware [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1221.836932] env[68217]: DEBUG nova.virt.hardware [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1221.837114] env[68217]: DEBUG nova.virt.hardware [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1221.837296] env[68217]: DEBUG nova.virt.hardware [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Got 1 possible topologies {{(pid=68217) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1221.837471] env[68217]: DEBUG nova.virt.hardware [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1221.837642] env[68217]: DEBUG nova.virt.hardware [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1221.838599] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c06f52-3610-4790-90ec-1ec33bef9ccd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1221.847518] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-599e8b0b-73fc-4a95-842d-e87c81c3a054 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.051253] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "refresh_cache-e11b2312-4cc2-4b49-bd26-22fd5629669d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1222.051596] env[68217]: DEBUG nova.compute.manager [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Instance network_info: |[{"id": "0680c857-993e-4996-8477-cfaa0a5727ad", "address": "fa:16:3e:9d:49:70", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0680c857-99", "ovs_interfaceid": "0680c857-993e-4996-8477-cfaa0a5727ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1222.052033] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Instance VIF info [{'network_name': 
'br-int', 'mac_address': 'fa:16:3e:9d:49:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '685b4083-b748-41fb-a68a-273b1073fa28', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0680c857-993e-4996-8477-cfaa0a5727ad', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1222.059404] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1222.059613] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1222.059836] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-048a1186-2b81-4fab-9e2a-78b5f4617914 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.081132] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1222.081132] env[68217]: value = "task-2962219" [ 1222.081132] env[68217]: _type = "Task" [ 1222.081132] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.089609] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962219, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1222.326565] env[68217]: DEBUG nova.compute.manager [req-22d47a1b-3548-4282-823d-97b24dc67686 req-680af8c1-507d-483b-85d7-d7332897113d service nova] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Received event network-changed-0680c857-993e-4996-8477-cfaa0a5727ad {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1222.326771] env[68217]: DEBUG nova.compute.manager [req-22d47a1b-3548-4282-823d-97b24dc67686 req-680af8c1-507d-483b-85d7-d7332897113d service nova] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Refreshing instance network info cache due to event network-changed-0680c857-993e-4996-8477-cfaa0a5727ad. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1222.326983] env[68217]: DEBUG oslo_concurrency.lockutils [req-22d47a1b-3548-4282-823d-97b24dc67686 req-680af8c1-507d-483b-85d7-d7332897113d service nova] Acquiring lock "refresh_cache-e11b2312-4cc2-4b49-bd26-22fd5629669d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1222.327142] env[68217]: DEBUG oslo_concurrency.lockutils [req-22d47a1b-3548-4282-823d-97b24dc67686 req-680af8c1-507d-483b-85d7-d7332897113d service nova] Acquired lock "refresh_cache-e11b2312-4cc2-4b49-bd26-22fd5629669d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1222.327311] env[68217]: DEBUG nova.network.neutron [req-22d47a1b-3548-4282-823d-97b24dc67686 req-680af8c1-507d-483b-85d7-d7332897113d service nova] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Refreshing network info cache for port 0680c857-993e-4996-8477-cfaa0a5727ad {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1222.591904] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962219, 'name': CreateVM_Task, 'duration_secs': 0.280072} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1222.592116] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1222.592738] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'mount_device': '/dev/sda', 'attachment_id': 'da0b9dde-db13-4e32-b169-7cb4df95820c', 'device_type': None, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594420', 'volume_id': '5c535a76-9a0d-422d-88d0-9feb0c1b7b55', 'name': 'volume-5c535a76-9a0d-422d-88d0-9feb0c1b7b55', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e11b2312-4cc2-4b49-bd26-22fd5629669d', 'attached_at': '', 'detached_at': '', 'volume_id': '5c535a76-9a0d-422d-88d0-9feb0c1b7b55', 'serial': '5c535a76-9a0d-422d-88d0-9feb0c1b7b55'}, 'guest_format': None, 'delete_on_termination': True, 'boot_index': 0, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=68217) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 1222.592941] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Root volume attach. 
Driver type: vmdk {{(pid=68217) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 1222.593687] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0cf6a81-b528-48bd-9c33-b32253d9ab2d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.600989] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6b230c8-41b9-48c2-b17c-e9c84a12a5aa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.606661] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64422341-851e-4d78-9814-8235d45229a6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.612155] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-e396c70c-39c4-45a0-b553-c502df534608 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.619713] env[68217]: DEBUG oslo_vmware.api [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1222.619713] env[68217]: value = "task-2962220" [ 1222.619713] env[68217]: _type = "Task" [ 1222.619713] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1222.626939] env[68217]: DEBUG oslo_vmware.api [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962220, 'name': RelocateVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.004296] env[68217]: DEBUG nova.network.neutron [req-22d47a1b-3548-4282-823d-97b24dc67686 req-680af8c1-507d-483b-85d7-d7332897113d service nova] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updated VIF entry in instance network info cache for port 0680c857-993e-4996-8477-cfaa0a5727ad. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1223.004652] env[68217]: DEBUG nova.network.neutron [req-22d47a1b-3548-4282-823d-97b24dc67686 req-680af8c1-507d-483b-85d7-d7332897113d service nova] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updating instance_info_cache with network_info: [{"id": "0680c857-993e-4996-8477-cfaa0a5727ad", "address": "fa:16:3e:9d:49:70", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0680c857-99", "ovs_interfaceid": "0680c857-993e-4996-8477-cfaa0a5727ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1223.129798] env[68217]: DEBUG oslo_vmware.api [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962220, 'name': RelocateVM_Task} progress is 19%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1223.507695] env[68217]: DEBUG oslo_concurrency.lockutils [req-22d47a1b-3548-4282-823d-97b24dc67686 req-680af8c1-507d-483b-85d7-d7332897113d service nova] Releasing lock "refresh_cache-e11b2312-4cc2-4b49-bd26-22fd5629669d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1223.631082] env[68217]: DEBUG oslo_vmware.api [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962220, 'name': RelocateVM_Task, 'duration_secs': 0.715559} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1223.631363] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Volume attach. 
Driver type: vmdk {{(pid=68217) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1223.631564] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594420', 'volume_id': '5c535a76-9a0d-422d-88d0-9feb0c1b7b55', 'name': 'volume-5c535a76-9a0d-422d-88d0-9feb0c1b7b55', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e11b2312-4cc2-4b49-bd26-22fd5629669d', 'attached_at': '', 'detached_at': '', 'volume_id': '5c535a76-9a0d-422d-88d0-9feb0c1b7b55', 'serial': '5c535a76-9a0d-422d-88d0-9feb0c1b7b55'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1223.632311] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d975a01-0c74-40fe-bed2-45d999060ac3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.648621] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27eec0eb-3628-4296-b009-465830f6f3d7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.669017] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] volume-5c535a76-9a0d-422d-88d0-9feb0c1b7b55/volume-5c535a76-9a0d-422d-88d0-9feb0c1b7b55.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1223.669247] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1dabfa0e-4b0b-4dc8-abe6-a18e1630a9a8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1223.688391] env[68217]: DEBUG oslo_vmware.api [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1223.688391] env[68217]: value = "task-2962221" [ 1223.688391] env[68217]: _type = "Task" [ 1223.688391] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1223.696177] env[68217]: DEBUG oslo_vmware.api [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962221, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.199764] env[68217]: DEBUG oslo_vmware.api [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962221, 'name': ReconfigVM_Task, 'duration_secs': 0.240646} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.199997] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Reconfigured VM instance instance-0000007b to attach disk [datastore1] volume-5c535a76-9a0d-422d-88d0-9feb0c1b7b55/volume-5c535a76-9a0d-422d-88d0-9feb0c1b7b55.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1224.204676] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0fb7c183-ea01-4e15-b3cd-8fe8313ca6f9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.220028] env[68217]: DEBUG oslo_vmware.api [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1224.220028] env[68217]: value = "task-2962222" [ 1224.220028] env[68217]: _type = "Task" [ 1224.220028] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.228289] env[68217]: DEBUG oslo_vmware.api [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962222, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1224.730598] env[68217]: DEBUG oslo_vmware.api [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962222, 'name': ReconfigVM_Task, 'duration_secs': 0.11631} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1224.730892] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594420', 'volume_id': '5c535a76-9a0d-422d-88d0-9feb0c1b7b55', 'name': 'volume-5c535a76-9a0d-422d-88d0-9feb0c1b7b55', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': 'e11b2312-4cc2-4b49-bd26-22fd5629669d', 'attached_at': '', 'detached_at': '', 'volume_id': '5c535a76-9a0d-422d-88d0-9feb0c1b7b55', 'serial': '5c535a76-9a0d-422d-88d0-9feb0c1b7b55'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1224.731434] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8903fd57-88bf-4c69-8316-97b2a33be941 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1224.738743] env[68217]: DEBUG oslo_vmware.api [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1224.738743] env[68217]: value = "task-2962223" [ 1224.738743] env[68217]: _type = "Task" [ 1224.738743] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1224.746737] env[68217]: DEBUG oslo_vmware.api [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962223, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.249118] env[68217]: DEBUG oslo_vmware.api [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962223, 'name': Rename_Task, 'duration_secs': 0.13048} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.249508] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1225.249666] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-082f0657-cf04-44f2-81f5-4ef6eda7ef8f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1225.256589] env[68217]: DEBUG oslo_vmware.api [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1225.256589] env[68217]: value = "task-2962224" [ 1225.256589] env[68217]: _type = "Task" [ 1225.256589] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1225.263995] env[68217]: DEBUG oslo_vmware.api [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962224, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1225.766506] env[68217]: DEBUG oslo_vmware.api [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962224, 'name': PowerOnVM_Task, 'duration_secs': 0.425104} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1225.766791] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1225.766953] env[68217]: INFO nova.compute.manager [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Took 3.93 seconds to spawn the instance on the hypervisor. [ 1225.767145] env[68217]: DEBUG nova.compute.manager [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1225.767892] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb123ac-63c5-422b-9cbe-8b8e062d1aab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1226.289333] env[68217]: INFO nova.compute.manager [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Took 11.21 seconds to build instance. 
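The spawn traced above is the boot-from-volume path on the VMware driver: CreateVM_Task, RelocateVM_Task to place the root volume, two ReconfigVM_Task calls to attach the VMDK, Rename_Task, and finally PowerOnVM_Task, each blocked on via oslo.vmware's task polling (the repeated "progress is N% ... completed successfully" entries). Below is a minimal sketch of that invoke-then-wait pattern; the host and credentials are placeholders, the constructor keyword names are assumed from oslo.vmware's public API rather than taken from this deployment, and vm_ref stands in for a vSphere managed object reference.

# Sketch of the invoke-then-wait pattern behind the task-29622xx entries above.
# Placeholders only; not this deployment's configuration.
from oslo_vmware import api

def power_on_vm(session, vm_ref):
    # Start the vSphere task, then block while oslo.vmware polls it;
    # wait_for_task() emits the "progress is N%" / "completed successfully"
    # log lines seen for task-2962218 through task-2962224.
    task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
    return session.wait_for_task(task)

# Constructing the session establishes the vCenter connection up front
# (keyword names assumed from the library's constructor).
session = api.VMwareAPISession(
    host="vcenter.example.org",      # placeholder endpoint
    server_username="user",          # placeholder
    server_password="secret",        # placeholder
    api_retry_count=10,
    task_poll_interval=0.5,          # sub-second polling, matching the log cadence
)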
[ 1226.791992] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3d3f07f2-addb-42b9-91e6-a4ae88117e09 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "e11b2312-4cc2-4b49-bd26-22fd5629669d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 12.725s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.348647] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1227.362456] env[68217]: DEBUG nova.compute.manager [req-e3dacc3e-1ec4-42a9-8c2c-070f6546124d req-37c40c33-8dfd-4ac0-ae48-43ec1344bc4e service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Received event network-changed-21f37b3b-0b0a-412e-8413-f3a1967f5c79 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1227.365020] env[68217]: DEBUG nova.compute.manager [req-e3dacc3e-1ec4-42a9-8c2c-070f6546124d req-37c40c33-8dfd-4ac0-ae48-43ec1344bc4e service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Refreshing instance network info cache due to event network-changed-21f37b3b-0b0a-412e-8413-f3a1967f5c79. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1227.365020] env[68217]: DEBUG oslo_concurrency.lockutils [req-e3dacc3e-1ec4-42a9-8c2c-070f6546124d req-37c40c33-8dfd-4ac0-ae48-43ec1344bc4e service nova] Acquiring lock "refresh_cache-a86015ea-fa6b-4cf8-9d79-273ffa02ec23" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1227.365020] env[68217]: DEBUG oslo_concurrency.lockutils [req-e3dacc3e-1ec4-42a9-8c2c-070f6546124d req-37c40c33-8dfd-4ac0-ae48-43ec1344bc4e service nova] Acquired lock "refresh_cache-a86015ea-fa6b-4cf8-9d79-273ffa02ec23" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1227.365020] env[68217]: DEBUG nova.network.neutron [req-e3dacc3e-1ec4-42a9-8c2c-070f6546124d req-37c40c33-8dfd-4ac0-ae48-43ec1344bc4e service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Refreshing network info cache for port 21f37b3b-0b0a-412e-8413-f3a1967f5c79 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1227.852598] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1227.852865] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1227.853049] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1227.853211] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68217) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1227.854137] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-114d6ae7-57c5-4334-8ea9-5c9acae7fbf7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.862684] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd00f03-661f-413b-a98c-d69302500a57 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.879181] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4698175f-344b-4ff7-aefb-1ebdd50c6cf0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.885714] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa283076-99ea-47f1-8378-ef32270ced72 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1227.915062] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=179881MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=68217) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1227.915220] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1227.915440] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1228.086513] env[68217]: DEBUG nova.network.neutron [req-e3dacc3e-1ec4-42a9-8c2c-070f6546124d req-37c40c33-8dfd-4ac0-ae48-43ec1344bc4e service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Updated VIF entry in instance network info cache for port 21f37b3b-0b0a-412e-8413-f3a1967f5c79. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1228.086872] env[68217]: DEBUG nova.network.neutron [req-e3dacc3e-1ec4-42a9-8c2c-070f6546124d req-37c40c33-8dfd-4ac0-ae48-43ec1344bc4e service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Updating instance_info_cache with network_info: [{"id": "21f37b3b-0b0a-412e-8413-f3a1967f5c79", "address": "fa:16:3e:d4:23:d4", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap21f37b3b-0b", "ovs_interfaceid": "21f37b3b-0b0a-412e-8413-f3a1967f5c79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1228.589425] env[68217]: DEBUG oslo_concurrency.lockutils [req-e3dacc3e-1ec4-42a9-8c2c-070f6546124d req-37c40c33-8dfd-4ac0-ae48-43ec1344bc4e service nova] Releasing lock "refresh_cache-a86015ea-fa6b-4cf8-9d79-273ffa02ec23" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1228.863979] env[68217]: DEBUG nova.compute.manager [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Stashing vm_state: active {{(pid=68217) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1228.926881] env[68217]: INFO nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updating resource usage from migration 920b0cb6-9983-47f6-9c8e-3515082d2210 [ 1228.945929] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 7056fb29-2a2f-4275-a411-4d5f3fcb421f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1228.946095] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance a86015ea-fa6b-4cf8-9d79-273ffa02ec23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1228.946240] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance d28bcf16-b081-4dc8-a975-2acaed222e15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1228.946361] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 9844e40f-29ed-48b9-a48f-85fbe10ae2fb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1228.946474] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 01c32252-f6e0-4cb0-966e-622872d49199 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1228.946588] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1228.946698] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 22c8918b-c67e-467c-8aea-7dff71a8d266 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1228.946806] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance b5fdce0e-465a-4cf0-9a15-313bba7a11e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1228.946916] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Migration 920b0cb6-9983-47f6-9c8e-3515082d2210 is active on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1228.947038] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance e11b2312-4cc2-4b49-bd26-22fd5629669d actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1228.947246] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1228.947381] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2240MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1229.059388] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a8b5e88-0d95-48fd-905f-60c7cfcf8a49 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.067355] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b34dce83-2849-43bc-8cb1-6254713da69f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.096743] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1baf87ce-4054-4741-a376-70c0092bfd24 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.103648] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94f1af3a-0ff8-4aa6-8696-2ac2f0d21378 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.116414] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1229.381540] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1229.388452] env[68217]: DEBUG nova.compute.manager [req-2ef484cd-a609-47a0-a72b-933a39b5b593 req-2e1be08a-759c-4b1a-bb13-7bdb47c45189 service nova] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Received event network-changed-0680c857-993e-4996-8477-cfaa0a5727ad {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1229.388549] env[68217]: DEBUG nova.compute.manager [req-2ef484cd-a609-47a0-a72b-933a39b5b593 req-2e1be08a-759c-4b1a-bb13-7bdb47c45189 service nova] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Refreshing instance network info cache due to event 
network-changed-0680c857-993e-4996-8477-cfaa0a5727ad. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1229.388766] env[68217]: DEBUG oslo_concurrency.lockutils [req-2ef484cd-a609-47a0-a72b-933a39b5b593 req-2e1be08a-759c-4b1a-bb13-7bdb47c45189 service nova] Acquiring lock "refresh_cache-e11b2312-4cc2-4b49-bd26-22fd5629669d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1229.388867] env[68217]: DEBUG oslo_concurrency.lockutils [req-2ef484cd-a609-47a0-a72b-933a39b5b593 req-2e1be08a-759c-4b1a-bb13-7bdb47c45189 service nova] Acquired lock "refresh_cache-e11b2312-4cc2-4b49-bd26-22fd5629669d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1229.389039] env[68217]: DEBUG nova.network.neutron [req-2ef484cd-a609-47a0-a72b-933a39b5b593 req-2e1be08a-759c-4b1a-bb13-7bdb47c45189 service nova] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Refreshing network info cache for port 0680c857-993e-4996-8477-cfaa0a5727ad {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1229.635965] env[68217]: ERROR nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [req-303e90d4-c732-40b2-aa01-adb80dfb9101] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-303e90d4-c732-40b2-aa01-adb80dfb9101"}]} [ 1229.650968] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1229.662906] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1229.663102] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1229.672370] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1229.691660] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1229.791612] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae04ab13-6f98-45fa-958f-334e59ce281e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.799023] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4256adef-0a41-497a-9e10-bf35b43adb71 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.828277] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6f5a577-877c-4dab-910a-7a903be06537 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.835343] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3042e97d-c512-47e7-917f-b4c78d7fbb23 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1229.848402] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1230.076521] env[68217]: DEBUG nova.network.neutron [req-2ef484cd-a609-47a0-a72b-933a39b5b593 req-2e1be08a-759c-4b1a-bb13-7bdb47c45189 service nova] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updated VIF entry in instance network info cache for port 0680c857-993e-4996-8477-cfaa0a5727ad. 
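
The ERROR entry above shows the report client getting a 409 "placement.concurrent_update" when it PUTs inventory with a stale resource provider generation; the DEBUG entries that follow show it refreshing the provider's inventories, aggregates and traits before trying again. Below is a minimal sketch of that refresh-and-retry pattern against the placement HTTP API. It is not Nova's report client; the endpoint URL and token are assumptions, and only the inventory values are copied from the log.

```python
# Sketch only: refresh-and-retry on a placement generation conflict.
# Endpoint URL and token are assumptions, not taken from this deployment.
import requests

PLACEMENT = 'http://placement.example:8778'            # assumption
HEADERS = {'X-Auth-Token': 'ADMIN_TOKEN',              # assumption
           'OpenStack-API-Version': 'placement 1.26'}

def put_inventories(rp_uuid, inventories, max_retries=3):
    for _ in range(max_retries):
        # Re-read the provider to learn its current generation.
        rp = requests.get(f'{PLACEMENT}/resource_providers/{rp_uuid}',
                          headers=HEADERS).json()
        payload = {'resource_provider_generation': rp['generation'],
                   'inventories': inventories}
        resp = requests.put(
            f'{PLACEMENT}/resource_providers/{rp_uuid}/inventories',
            json=payload, headers=HEADERS)
        if resp.status_code != 409:
            return resp
        # 409 placement.concurrent_update: another writer bumped the
        # generation between our read and our PUT; loop and retry.
    raise RuntimeError('placement generation conflict persisted')

# Inventory shaped like the provider_tree entries above:
INVENTORIES = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154,
                'step_size': 1, 'allocation_ratio': 1.0},
}
# put_inventories('42aedcce-ee61-45e1-bf10-c06056d1f548', INVENTORIES)
```

Once a PUT carries the generation the server currently holds it succeeds, which is what the later "Updated inventory ... with generation 169" and "generation from 169 to 170" entries record.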
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1230.076880] env[68217]: DEBUG nova.network.neutron [req-2ef484cd-a609-47a0-a72b-933a39b5b593 req-2e1be08a-759c-4b1a-bb13-7bdb47c45189 service nova] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updating instance_info_cache with network_info: [{"id": "0680c857-993e-4996-8477-cfaa0a5727ad", "address": "fa:16:3e:9d:49:70", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0680c857-99", "ovs_interfaceid": "0680c857-993e-4996-8477-cfaa0a5727ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1230.377802] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 169 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1230.378036] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 169 to 170 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1230.378338] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1230.579677] env[68217]: DEBUG oslo_concurrency.lockutils [req-2ef484cd-a609-47a0-a72b-933a39b5b593 req-2e1be08a-759c-4b1a-bb13-7bdb47c45189 service nova] Releasing lock 
"refresh_cache-e11b2312-4cc2-4b49-bd26-22fd5629669d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1230.883250] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68217) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1230.883631] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.968s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1230.883682] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 1.502s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.326017] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1231.326017] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1231.388055] env[68217]: INFO nova.compute.claims [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1231.829120] env[68217]: DEBUG nova.compute.utils [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1231.894249] env[68217]: INFO nova.compute.resource_tracker [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updating resource usage from migration 920b0cb6-9983-47f6-9c8e-3515082d2210 [ 1232.019356] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab8bf3aa-0841-4071-a1d2-18750cdc055a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.027475] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-afc24b38-76c0-4362-9199-1f26238c2bde {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.057378] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49d351c0-709d-4d3b-a512-a108ee759584 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.064819] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f8a7af3-8a9e-4b65-959c-9f0fe949f5b9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1232.078100] env[68217]: DEBUG nova.compute.provider_tree [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1232.333050] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1232.581161] env[68217]: DEBUG nova.scheduler.client.report [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1232.667712] env[68217]: DEBUG oslo_concurrency.lockutils [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Acquiring lock "22c8918b-c67e-467c-8aea-7dff71a8d266" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.668024] env[68217]: DEBUG oslo_concurrency.lockutils [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Lock "22c8918b-c67e-467c-8aea-7dff71a8d266" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.668238] env[68217]: DEBUG oslo_concurrency.lockutils [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Acquiring lock "22c8918b-c67e-467c-8aea-7dff71a8d266-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1232.668435] env[68217]: DEBUG oslo_concurrency.lockutils [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Lock "22c8918b-c67e-467c-8aea-7dff71a8d266-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1232.668605] env[68217]: DEBUG oslo_concurrency.lockutils [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Lock "22c8918b-c67e-467c-8aea-7dff71a8d266-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1232.670761] env[68217]: INFO nova.compute.manager [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Terminating instance [ 1233.086502] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.203s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1233.086923] env[68217]: INFO nova.compute.manager [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Migrating [ 1233.174853] env[68217]: DEBUG nova.compute.manager [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1233.175125] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1233.176302] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78ab4ea2-f48e-4011-b162-6364e5222471 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.183866] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1233.184123] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a7db1b81-78aa-487b-9936-a27d60a8c943 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.189877] env[68217]: DEBUG oslo_vmware.api [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Waiting for the task: (returnval){ [ 1233.189877] env[68217]: value = "task-2962225" [ 1233.189877] env[68217]: _type = "Task" [ 1233.189877] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.198179] env[68217]: DEBUG oslo_vmware.api [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962225, 'name': PowerOffVM_Task} progress is 0%. 
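
The terminate path above first takes the per-instance lock for do_terminate_instance, then the instance "-events" lock, while earlier entries show the shared "compute_resources" lock being handed between the resource tracker and a resize claim; all of these come from oslo.concurrency. A minimal sketch of that locking pattern follows; the worker functions are hypothetical stand-ins for the Nova code paths named in the log.

```python
# Sketch of the oslo.concurrency pattern behind the "Acquiring lock" /
# "Lock ... acquired" / "released" entries above.
from oslo_concurrency import lockutils

@lockutils.synchronized('compute_resources')
def update_usage():
    # Runs with the in-process "compute_resources" lock held, so resource
    # tracker updates from concurrent requests are serialized.
    pass

def terminate_instance(instance_uuid):
    # Per-instance lock, like the "22c8918b-c67e-467c-8aea-7dff71a8d266"
    # and "...-events" locks in the log.
    with lockutils.lock(instance_uuid):
        with lockutils.lock(instance_uuid + '-events'):
            pass           # clear pending external events for the instance
        update_usage()     # then update the resource tracker
```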
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.387265] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1233.387541] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1233.387784] env[68217]: INFO nova.compute.manager [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Attaching volume 0f1397a7-fdaa-4f0b-aa8b-820878707d71 to /dev/sdb [ 1233.417731] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f256161-3df1-40e3-908f-9f24d38952e6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.424931] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e6bb47d-cd13-4b6a-a100-bfcb6538d598 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.438214] env[68217]: DEBUG nova.virt.block_device [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Updating existing volume attachment record: f4b3f505-ee2b-4e36-845b-d538a9ef8754 {{(pid=68217) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1233.602178] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "refresh_cache-e11b2312-4cc2-4b49-bd26-22fd5629669d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.602506] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "refresh_cache-e11b2312-4cc2-4b49-bd26-22fd5629669d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1233.602833] env[68217]: DEBUG nova.network.neutron [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1233.700124] env[68217]: DEBUG oslo_vmware.api [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 
tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962225, 'name': PowerOffVM_Task, 'duration_secs': 0.171684} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.700413] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1233.700601] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1233.700865] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b1e928c-c613-44c2-bdd4-628a2669b242 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.760464] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1233.760859] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1233.761241] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Deleting the datastore file [datastore2] 22c8918b-c67e-467c-8aea-7dff71a8d266 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1233.761511] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c7e8b39d-242f-4eb3-a567-7977ae4b0172 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.767768] env[68217]: DEBUG oslo_vmware.api [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Waiting for the task: (returnval){ [ 1233.767768] env[68217]: value = "task-2962228" [ 1233.767768] env[68217]: _type = "Task" [ 1233.767768] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.775917] env[68217]: DEBUG oslo_vmware.api [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962228, 'name': DeleteDatastoreFile_Task} progress is 0%. 
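
The PowerOffVM_Task and DeleteDatastoreFile_Task entries above are driven through wait_for_task, which polls the vCenter task until it reports completion. Below is a stripped-down sketch of that polling loop; get_task_info is a hypothetical callable standing in for the property read oslo.vmware performs, so only the control flow is illustrative.

```python
# Sketch of the wait_for_task/_poll_task behaviour seen in the log above.
import time

def wait_for_task(task_ref, get_task_info, interval=0.5, timeout=300):
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        # e.g. {'state': 'running', 'progress': 40}
        info = get_task_info(task_ref)
        if info['state'] == 'success':
            return info
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        time.sleep(interval)
    raise TimeoutError(f'{task_ref} did not complete within {timeout}s')
```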
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.885982] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1233.886254] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1233.886467] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1233.886650] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1233.886845] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1233.887052] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.279573] env[68217]: DEBUG oslo_vmware.api [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Task: {'id': task-2962228, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142876} completed successfully. 
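
The run of "Running periodic task ComputeManager._*" entries above comes from oslo.service's periodic task machinery. A minimal sketch of how such tasks are declared is shown below; the spacing value and task body are illustrative, not Nova's actual configuration.

```python
# Sketch of declaring a periodic task with oslo.service.
from oslo_service import periodic_task

class DemoManager(periodic_task.PeriodicTasks):
    @periodic_task.periodic_task(spacing=60)
    def _poll_volume_usage(self, context):
        # Invoked by run_periodic_tasks on its own schedule. Nova's
        # _reclaim_queued_deletes similarly runs on a schedule but returns
        # early when CONF.reclaim_instance_interval <= 0, which is what the
        # "skipping" entry below records.
        pass
```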
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.279933] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1234.279981] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1234.280176] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1234.280348] env[68217]: INFO nova.compute.manager [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1234.280619] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1234.280824] env[68217]: DEBUG nova.compute.manager [-] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1234.280919] env[68217]: DEBUG nova.network.neutron [-] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1234.348898] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.349060] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
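
The "Waiting for function ... _deallocate_network_with_retries to return" entry above is an oslo.service looping call that re-invokes a function until it signals completion. A small sketch of that retry-until-done pattern follows; the deallocation stand-in is hypothetical and simply declares success after a few attempts, and the real code path uses the eventlet backend's retrying looping call rather than this fixed-interval variant.

```python
# Sketch of the retry-until-done looping-call pattern from oslo.service.
from oslo_service import loopingcall

attempts = {'count': 0}

def _try_deallocate():
    attempts['count'] += 1
    if attempts['count'] >= 3:                         # pretend it succeeded
        raise loopingcall.LoopingCallDone(retvalue=True)

timer = loopingcall.FixedIntervalLoopingCall(_try_deallocate)
result = timer.start(interval=0.1).wait()              # blocks until done
print(result)                                          # True
```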
{{(pid=68217) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1234.391981] env[68217]: DEBUG nova.network.neutron [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updating instance_info_cache with network_info: [{"id": "0680c857-993e-4996-8477-cfaa0a5727ad", "address": "fa:16:3e:9d:49:70", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0680c857-99", "ovs_interfaceid": "0680c857-993e-4996-8477-cfaa0a5727ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.775665] env[68217]: DEBUG nova.compute.manager [req-bb9e7627-8844-4910-b181-9bcfa7d5a09f req-fc1267b7-8ba4-4b8f-a43e-c00f233be3f4 service nova] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Received event network-vif-deleted-306424b1-5a55-49af-a82a-c5cc49304899 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1234.775863] env[68217]: INFO nova.compute.manager [req-bb9e7627-8844-4910-b181-9bcfa7d5a09f req-fc1267b7-8ba4-4b8f-a43e-c00f233be3f4 service nova] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Neutron deleted interface 306424b1-5a55-49af-a82a-c5cc49304899; detaching it from the instance and deleting it from the info cache [ 1234.776072] env[68217]: DEBUG nova.network.neutron [req-bb9e7627-8844-4910-b181-9bcfa7d5a09f req-fc1267b7-8ba4-4b8f-a43e-c00f233be3f4 service nova] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.894794] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "refresh_cache-e11b2312-4cc2-4b49-bd26-22fd5629669d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1235.254972] env[68217]: DEBUG nova.network.neutron [-] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1235.278465] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-86a97125-2ba6-4c3b-bc30-83779da91e85 
{{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.288630] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8b5f0f-94f9-459b-bfa8-5f95eecc7820 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.318040] env[68217]: DEBUG nova.compute.manager [req-bb9e7627-8844-4910-b181-9bcfa7d5a09f req-fc1267b7-8ba4-4b8f-a43e-c00f233be3f4 service nova] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Detach interface failed, port_id=306424b1-5a55-49af-a82a-c5cc49304899, reason: Instance 22c8918b-c67e-467c-8aea-7dff71a8d266 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1235.758093] env[68217]: INFO nova.compute.manager [-] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Took 1.48 seconds to deallocate network for instance. [ 1236.264023] env[68217]: DEBUG oslo_concurrency.lockutils [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1236.264320] env[68217]: DEBUG oslo_concurrency.lockutils [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1236.264550] env[68217]: DEBUG nova.objects.instance [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Lazy-loading 'resources' on Instance uuid 22c8918b-c67e-467c-8aea-7dff71a8d266 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1236.409202] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a3266ef-57a3-49ca-9908-40379811b004 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.428580] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updating instance 'e11b2312-4cc2-4b49-bd26-22fd5629669d' progress to 0 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1236.879381] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3639e52e-643c-4a42-be59-18fa7a8afe5e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.886898] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2043b0ca-f8c0-46e4-9fad-d65acf8ed584 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.915190] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-311a6841-4e3e-4814-8c0d-1d4f25330b4e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.921752] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cead4ba8-40de-4a01-bd3e-759d4e698bf0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.935961] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1236.936423] env[68217]: DEBUG nova.compute.provider_tree [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1236.937687] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-eeac1841-9bb4-41b4-a5ee-58bfd0c6aea1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.945089] env[68217]: DEBUG oslo_vmware.api [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1236.945089] env[68217]: value = "task-2962230" [ 1236.945089] env[68217]: _type = "Task" [ 1236.945089] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.953180] env[68217]: DEBUG oslo_vmware.api [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962230, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.441274] env[68217]: DEBUG nova.scheduler.client.report [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1237.455908] env[68217]: DEBUG oslo_vmware.api [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962230, 'name': PowerOffVM_Task, 'duration_secs': 0.212032} completed successfully. 
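
The instance_info_cache payloads logged earlier carry the full network_info structure for port 0680c857-993e-4996-8477-cfaa0a5727ad. A small sketch of reading the fixed and floating addresses out of an entry shaped like those payloads is shown below; the dict is a trimmed stand-in built from the values in the log, not the literal cache record.

```python
# Sketch: extract addresses from a network_info VIF entry shaped like the
# instance_info_cache payloads above.
vif = {
    "id": "0680c857-993e-4996-8477-cfaa0a5727ad",
    "address": "fa:16:3e:9d:49:70",
    "network": {"subnets": [{
        "cidr": "192.168.128.0/28",
        "ips": [{"address": "192.168.128.10", "type": "fixed",
                 "floating_ips": [{"address": "10.180.180.205",
                                   "type": "floating"}]}],
    }]},
    "type": "ovs",
    "devname": "tap0680c857-99",
}

def addresses(vif):
    fixed, floating = [], []
    for subnet in vif["network"]["subnets"]:
        for ip in subnet["ips"]:
            fixed.append(ip["address"])
            floating.extend(f["address"] for f in ip.get("floating_ips", []))
    return fixed, floating

print(addresses(vif))  # (['192.168.128.10'], ['10.180.180.205'])
```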
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.456708] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1237.456890] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updating instance 'e11b2312-4cc2-4b49-bd26-22fd5629669d' progress to 17 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1237.945656] env[68217]: DEBUG oslo_concurrency.lockutils [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.681s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1237.962898] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1237.963154] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1237.963314] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1237.963515] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1237.963655] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1237.963799] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 
tempest-ServerActionsTestOtherA-393099776-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1237.963996] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1237.964171] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1237.964335] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1237.964496] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1237.964667] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1237.970454] env[68217]: INFO nova.scheduler.client.report [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Deleted allocations for instance 22c8918b-c67e-467c-8aea-7dff71a8d266 [ 1237.971427] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-551d0cc0-a09c-4575-921a-3d153b0b7360 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.983312] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Volume attach. 
Driver type: vmdk {{(pid=68217) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1237.983527] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594424', 'volume_id': '0f1397a7-fdaa-4f0b-aa8b-820878707d71', 'name': 'volume-0f1397a7-fdaa-4f0b-aa8b-820878707d71', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c', 'attached_at': '', 'detached_at': '', 'volume_id': '0f1397a7-fdaa-4f0b-aa8b-820878707d71', 'serial': '0f1397a7-fdaa-4f0b-aa8b-820878707d71'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1237.986411] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dda4aff-7035-4754-99db-7a3c7e8a1510 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.003349] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8c9059-75e2-4e61-9063-469a902b581d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.005810] env[68217]: DEBUG oslo_vmware.api [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1238.005810] env[68217]: value = "task-2962231" [ 1238.005810] env[68217]: _type = "Task" [ 1238.005810] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.028825] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Reconfiguring VM instance instance-00000077 to attach disk [datastore1] volume-0f1397a7-fdaa-4f0b-aa8b-820878707d71/volume-0f1397a7-fdaa-4f0b-aa8b-820878707d71.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1238.030431] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e8ee9863-3767-4cad-9467-97b4c36eabce {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.047333] env[68217]: DEBUG oslo_vmware.api [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962231, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.052512] env[68217]: DEBUG oslo_vmware.api [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1238.052512] env[68217]: value = "task-2962232" [ 1238.052512] env[68217]: _type = "Task" [ 1238.052512] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.060752] env[68217]: DEBUG oslo_vmware.api [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962232, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.492022] env[68217]: DEBUG oslo_concurrency.lockutils [None req-117376a4-a809-4bfc-8848-53cc41bc9188 tempest-ServersTestFqdnHostnames-515670926 tempest-ServersTestFqdnHostnames-515670926-project-member] Lock "22c8918b-c67e-467c-8aea-7dff71a8d266" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.824s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1238.516439] env[68217]: DEBUG oslo_vmware.api [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962231, 'name': ReconfigVM_Task, 'duration_secs': 0.143293} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.516725] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updating instance 'e11b2312-4cc2-4b49-bd26-22fd5629669d' progress to 33 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1238.561770] env[68217]: DEBUG oslo_vmware.api [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962232, 'name': ReconfigVM_Task, 'duration_secs': 0.340511} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.562680] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Reconfigured VM instance instance-00000077 to attach disk [datastore1] volume-0f1397a7-fdaa-4f0b-aa8b-820878707d71/volume-0f1397a7-fdaa-4f0b-aa8b-820878707d71.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1238.567408] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5196541-4951-4067-9eaf-7f7156167fae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.582149] env[68217]: DEBUG oslo_vmware.api [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1238.582149] env[68217]: value = "task-2962233" [ 1238.582149] env[68217]: _type = "Task" [ 1238.582149] env[68217]: } to complete. 
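
The _attach_volume_vmdk entry above receives a connection_info dict with driver_volume_type 'vmdk' and the volume name and id under 'data', and the following ReconfigVM_Task attaches the backing vmdk under datastore1. A small illustrative helper deriving that path from such a dict is shown below; the helper is hypothetical, and the values are copied from the log.

```python
# Sketch: derive the vmdk backing path referenced in the
# "Reconfiguring VM instance ... to attach disk" entry above.
def vmdk_path(connection_info, datastore='datastore1'):
    data = connection_info['data']
    name = data['name']               # e.g. 'volume-<volume_id>'
    return f'[{datastore}] {name}/{name}.vmdk'

conn = {'driver_volume_type': 'vmdk',
        'data': {'volume': 'vm-594424',
                 'volume_id': '0f1397a7-fdaa-4f0b-aa8b-820878707d71',
                 'name': 'volume-0f1397a7-fdaa-4f0b-aa8b-820878707d71',
                 'access_mode': 'rw'}}
print(vmdk_path(conn))
# -> [datastore1] volume-0f1397a7-fdaa-4f0b-aa8b-820878707d71/volume-0f1397a7-fdaa-4f0b-aa8b-820878707d71.vmdk
```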
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1238.592226] env[68217]: DEBUG oslo_vmware.api [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962233, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.022718] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1239.022976] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1239.023154] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1239.023337] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1239.023496] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1239.023642] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1239.023922] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1239.024138] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1239.024320] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1239.024491] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1239.024695] env[68217]: DEBUG nova.virt.hardware [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1239.030132] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Reconfiguring VM instance instance-0000007b to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1239.030426] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e07741a4-34a8-4b71-840b-7e6bc5aa56b2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.049593] env[68217]: DEBUG oslo_vmware.api [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1239.049593] env[68217]: value = "task-2962234" [ 1239.049593] env[68217]: _type = "Task" [ 1239.049593] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.057310] env[68217]: DEBUG oslo_vmware.api [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962234, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1239.090952] env[68217]: DEBUG oslo_vmware.api [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962233, 'name': ReconfigVM_Task, 'duration_secs': 0.124419} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.091260] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594424', 'volume_id': '0f1397a7-fdaa-4f0b-aa8b-820878707d71', 'name': 'volume-0f1397a7-fdaa-4f0b-aa8b-820878707d71', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c', 'attached_at': '', 'detached_at': '', 'volume_id': '0f1397a7-fdaa-4f0b-aa8b-820878707d71', 'serial': '0f1397a7-fdaa-4f0b-aa8b-820878707d71'} {{(pid=68217) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1239.559941] env[68217]: DEBUG oslo_vmware.api [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962234, 'name': ReconfigVM_Task, 'duration_secs': 0.161152} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1239.560337] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Reconfigured VM instance instance-0000007b to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1239.562030] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6218ea5e-ae8e-4a06-a735-ea96275598e6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.584161] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Reconfiguring VM instance instance-0000007b to attach disk [datastore1] volume-5c535a76-9a0d-422d-88d0-9feb0c1b7b55/volume-5c535a76-9a0d-422d-88d0-9feb0c1b7b55.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1239.585150] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fb3c7d7b-d032-452e-9693-d3399d92eef9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1239.604092] env[68217]: DEBUG oslo_vmware.api [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1239.604092] env[68217]: value = "task-2962235" [ 1239.604092] env[68217]: _type = "Task" [ 1239.604092] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1239.611746] env[68217]: DEBUG oslo_vmware.api [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962235, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.115516] env[68217]: DEBUG oslo_vmware.api [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962235, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1240.131814] env[68217]: DEBUG nova.objects.instance [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lazy-loading 'flavor' on Instance uuid 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1240.614884] env[68217]: DEBUG oslo_vmware.api [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962235, 'name': ReconfigVM_Task, 'duration_secs': 0.798978} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1240.615187] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Reconfigured VM instance instance-0000007b to attach disk [datastore1] volume-5c535a76-9a0d-422d-88d0-9feb0c1b7b55/volume-5c535a76-9a0d-422d-88d0-9feb0c1b7b55.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1240.615507] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updating instance 'e11b2312-4cc2-4b49-bd26-22fd5629669d' progress to 50 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1240.635749] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a60608c3-a3a9-4d2b-b02c-0ed2a8e1f472 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.248s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1240.964724] env[68217]: DEBUG oslo_concurrency.lockutils [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1240.964997] env[68217]: DEBUG oslo_concurrency.lockutils [None req-094aee25-9d39-4630-8f5d-31ee10892383 
tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1241.121705] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2cee115-223a-4dc4-8e7a-5e779cdd3f9b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.141823] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c75a02f-4c97-4f23-b40f-13fd5fe74a8a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.158961] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updating instance 'e11b2312-4cc2-4b49-bd26-22fd5629669d' progress to 67 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1241.344382] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1241.468537] env[68217]: INFO nova.compute.manager [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Detaching volume 0f1397a7-fdaa-4f0b-aa8b-820878707d71 [ 1241.506479] env[68217]: INFO nova.virt.block_device [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Attempting to driver detach volume 0f1397a7-fdaa-4f0b-aa8b-820878707d71 from mountpoint /dev/sdb [ 1241.506771] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Volume detach. 
Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1241.507925] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594424', 'volume_id': '0f1397a7-fdaa-4f0b-aa8b-820878707d71', 'name': 'volume-0f1397a7-fdaa-4f0b-aa8b-820878707d71', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c', 'attached_at': '', 'detached_at': '', 'volume_id': '0f1397a7-fdaa-4f0b-aa8b-820878707d71', 'serial': '0f1397a7-fdaa-4f0b-aa8b-820878707d71'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1241.507925] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3602f79d-856a-4d29-aaa1-07651524d8af {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.530626] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2376972-b3d8-4695-990e-fba28f97a589 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.537362] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961f3c42-10c7-4fd5-8e40-853ee50b39cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.556555] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77ddeb5e-fc76-46d1-a386-9a0449016593 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.572527] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] The volume has not been displaced from its original location: [datastore1] volume-0f1397a7-fdaa-4f0b-aa8b-820878707d71/volume-0f1397a7-fdaa-4f0b-aa8b-820878707d71.vmdk. No consolidation needed. 
{{(pid=68217) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1241.577665] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Reconfiguring VM instance instance-00000077 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1241.577931] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-138c356d-a39d-434f-80f1-7c6e04478825 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1241.596093] env[68217]: DEBUG oslo_vmware.api [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1241.596093] env[68217]: value = "task-2962236" [ 1241.596093] env[68217]: _type = "Task" [ 1241.596093] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1241.603883] env[68217]: DEBUG oslo_vmware.api [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962236, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.107194] env[68217]: DEBUG oslo_vmware.api [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962236, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.609930] env[68217]: DEBUG oslo_vmware.api [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962236, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1242.837724] env[68217]: DEBUG nova.network.neutron [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Port 0680c857-993e-4996-8477-cfaa0a5727ad binding to destination host cpu-1 is already ACTIVE {{(pid=68217) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1243.107579] env[68217]: DEBUG oslo_vmware.api [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962236, 'name': ReconfigVM_Task, 'duration_secs': 1.218079} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.107920] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Reconfigured VM instance instance-00000077 to detach disk 2001 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1243.112922] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-db835efd-76cd-4a5b-8f25-b0c58f70de62 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1243.127235] env[68217]: DEBUG oslo_vmware.api [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1243.127235] env[68217]: value = "task-2962237" [ 1243.127235] env[68217]: _type = "Task" [ 1243.127235] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1243.135078] env[68217]: DEBUG oslo_vmware.api [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962237, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1243.534820] env[68217]: DEBUG oslo_concurrency.lockutils [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Acquiring lock "945e4574-75b7-4ff7-8e0e-0fee0c90bef1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.535076] env[68217]: DEBUG oslo_concurrency.lockutils [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Lock "945e4574-75b7-4ff7-8e0e-0fee0c90bef1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.637328] env[68217]: DEBUG oslo_vmware.api [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962237, 'name': ReconfigVM_Task, 'duration_secs': 0.168967} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1243.637630] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594424', 'volume_id': '0f1397a7-fdaa-4f0b-aa8b-820878707d71', 'name': 'volume-0f1397a7-fdaa-4f0b-aa8b-820878707d71', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'reserved', 'instance': '5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c', 'attached_at': '', 'detached_at': '', 'volume_id': '0f1397a7-fdaa-4f0b-aa8b-820878707d71', 'serial': '0f1397a7-fdaa-4f0b-aa8b-820878707d71'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1243.860407] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "e11b2312-4cc2-4b49-bd26-22fd5629669d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1243.860666] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "e11b2312-4cc2-4b49-bd26-22fd5629669d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1243.861466] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "e11b2312-4cc2-4b49-bd26-22fd5629669d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1244.038214] env[68217]: DEBUG nova.compute.manager [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1244.182646] env[68217]: DEBUG nova.objects.instance [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lazy-loading 'flavor' on Instance uuid 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1244.559140] env[68217]: DEBUG oslo_concurrency.lockutils [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1244.559438] env[68217]: DEBUG oslo_concurrency.lockutils [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1244.561055] env[68217]: INFO nova.compute.claims [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1244.905716] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "refresh_cache-e11b2312-4cc2-4b49-bd26-22fd5629669d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1244.905904] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "refresh_cache-e11b2312-4cc2-4b49-bd26-22fd5629669d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1244.906096] env[68217]: DEBUG nova.network.neutron [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1245.189055] env[68217]: DEBUG oslo_concurrency.lockutils [None req-094aee25-9d39-4630-8f5d-31ee10892383 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 4.224s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1245.486498] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9" by 
"nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1245.486764] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1245.486946] env[68217]: INFO nova.compute.manager [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Shelving [ 1245.621796] env[68217]: DEBUG nova.network.neutron [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updating instance_info_cache with network_info: [{"id": "0680c857-993e-4996-8477-cfaa0a5727ad", "address": "fa:16:3e:9d:49:70", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0680c857-99", "ovs_interfaceid": "0680c857-993e-4996-8477-cfaa0a5727ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1245.685385] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ff25ac-b12f-4e73-a530-2e24427a8dce {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.693288] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-728f7e60-f59d-4cae-8010-9310046ae747 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.723967] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e91a10c-c5ae-4b67-b090-8cffbef26a43 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.731848] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e089529-5ddc-4e29-8aaf-3c0a6712a746 {{(pid=68217) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1245.744821] env[68217]: DEBUG nova.compute.provider_tree [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1246.124310] env[68217]: DEBUG oslo_concurrency.lockutils [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "refresh_cache-e11b2312-4cc2-4b49-bd26-22fd5629669d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1246.253094] env[68217]: DEBUG oslo_concurrency.lockutils [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1246.253395] env[68217]: DEBUG oslo_concurrency.lockutils [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1246.253530] env[68217]: DEBUG oslo_concurrency.lockutils [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1246.253715] env[68217]: DEBUG oslo_concurrency.lockutils [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1246.253874] env[68217]: DEBUG oslo_concurrency.lockutils [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1246.255976] env[68217]: INFO nova.compute.manager [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Terminating instance [ 1246.270480] env[68217]: ERROR nova.scheduler.client.report [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [req-47e70f05-7dee-436d-8f3e-01da1552bb14] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-47e70f05-7dee-436d-8f3e-01da1552bb14"}]} [ 1246.287103] env[68217]: DEBUG nova.scheduler.client.report [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1246.300400] env[68217]: DEBUG nova.scheduler.client.report [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1246.300682] env[68217]: DEBUG nova.compute.provider_tree [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1246.310930] env[68217]: DEBUG nova.scheduler.client.report [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Refreshing 
aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1246.327699] env[68217]: DEBUG nova.scheduler.client.report [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1246.445672] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6172115-5313-457a-b1c4-b3d4f55bd889 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.453950] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261f4d43-253f-4f7c-8b5c-d5b3917701a0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.484306] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd3685c6-455a-45f7-8afb-6b077f8af488 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.492797] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c662a723-fd74-402c-ba5d-3679c0686122 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.498594] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1246.506203] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-95e9c99a-08d2-48f8-8c5c-f4868cfdd23a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.508265] env[68217]: DEBUG nova.compute.provider_tree [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1246.514483] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1246.514483] env[68217]: value = "task-2962238" [ 1246.514483] env[68217]: _type = "Task" [ 
1246.514483] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.524923] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962238, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1246.633483] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf9257db-0fd1-4502-b4b0-42227f793c7c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.641144] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-354ed90b-9025-4d10-aae8-94a6f79f9ea4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.760062] env[68217]: DEBUG nova.compute.manager [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1246.760062] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1246.760905] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d257b2-8b2e-4613-9417-f8435353609e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.768372] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1246.768569] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1fc660c-e31d-4d48-b11a-610a0aad8e51 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1246.773934] env[68217]: DEBUG oslo_vmware.api [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1246.773934] env[68217]: value = "task-2962239" [ 1246.773934] env[68217]: _type = "Task" [ 1246.773934] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1246.781046] env[68217]: DEBUG oslo_vmware.api [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962239, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.024213] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962238, 'name': PowerOffVM_Task, 'duration_secs': 0.187517} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.024498] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1247.025305] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6067e121-96b2-45ba-a75f-3919a2e958d6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.045029] env[68217]: DEBUG nova.scheduler.client.report [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 171 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1247.045029] env[68217]: DEBUG nova.compute.provider_tree [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 171 to 172 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1247.045240] env[68217]: DEBUG nova.compute.provider_tree [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1247.048703] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee12e91c-d327-4def-8c9c-8d16994aa995 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.284478] env[68217]: DEBUG oslo_vmware.api [None req-23100370-17e2-413c-b0e2-76459a7b4131 
tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962239, 'name': PowerOffVM_Task, 'duration_secs': 0.18121} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.284815] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1247.284986] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1247.285258] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f0a576e8-a67a-4697-8dfe-c31c2d3516a0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.344712] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1247.344931] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1247.345126] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Deleting the datastore file [datastore2] 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1247.345385] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0bf583bd-331f-46b2-b03a-04dc89309253 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.351369] env[68217]: DEBUG oslo_vmware.api [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for the task: (returnval){ [ 1247.351369] env[68217]: value = "task-2962241" [ 1247.351369] env[68217]: _type = "Task" [ 1247.351369] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.358846] env[68217]: DEBUG oslo_vmware.api [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962241, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.552226] env[68217]: DEBUG oslo_concurrency.lockutils [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.993s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1247.552940] env[68217]: DEBUG nova.compute.manager [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1247.558965] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Creating Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 1247.559339] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-3fa5b577-0416-4636-b56a-c49e93ab3f7e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.567269] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1247.567269] env[68217]: value = "task-2962242" [ 1247.567269] env[68217]: _type = "Task" [ 1247.567269] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1247.575545] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962242, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1247.734650] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33fd732a-dd89-4c55-8d22-a079c77c7ca5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.754710] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10570717-9a46-4282-a6ec-1739670922a7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1247.762096] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updating instance 'e11b2312-4cc2-4b49-bd26-22fd5629669d' progress to 83 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1247.861054] env[68217]: DEBUG oslo_vmware.api [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Task: {'id': task-2962241, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127699} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1247.861250] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1247.861437] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1247.861619] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1247.861791] env[68217]: INFO nova.compute.manager [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1247.862044] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1247.862240] env[68217]: DEBUG nova.compute.manager [-] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1247.862352] env[68217]: DEBUG nova.network.neutron [-] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1248.058508] env[68217]: DEBUG nova.compute.utils [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1248.060052] env[68217]: DEBUG nova.compute.manager [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1248.064023] env[68217]: DEBUG nova.network.neutron [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1248.077203] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962242, 'name': CreateSnapshot_Task, 'duration_secs': 0.414652} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.077447] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Created Snapshot of the VM instance {{(pid=68217) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 1248.078166] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e351ffb4-1d33-4b1d-8aad-6d129fbf3fc2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.134384] env[68217]: DEBUG nova.policy [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '510a80e9450d4eb9ad47dc3526e8c477', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '64cde75b201e4038b914350b250a8bd6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1248.269051] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1248.269376] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1e22e6cf-a7d4-4931-a493-02fc8cdc837a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.276448] env[68217]: DEBUG oslo_vmware.api [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1248.276448] env[68217]: value = "task-2962243" [ 1248.276448] env[68217]: _type = "Task" [ 1248.276448] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.284276] env[68217]: DEBUG oslo_vmware.api [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962243, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.533622] env[68217]: DEBUG nova.compute.manager [req-3aea122b-cecc-4c1f-abf3-d7c127ca2ba0 req-e6ae5699-9b40-4295-8802-90586c864148 service nova] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Received event network-vif-deleted-ab455e1f-1232-4fd4-a71b-b73ce15172ff {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1248.533902] env[68217]: INFO nova.compute.manager [req-3aea122b-cecc-4c1f-abf3-d7c127ca2ba0 req-e6ae5699-9b40-4295-8802-90586c864148 service nova] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Neutron deleted interface ab455e1f-1232-4fd4-a71b-b73ce15172ff; detaching it from the instance and deleting it from the info cache [ 1248.534062] env[68217]: DEBUG nova.network.neutron [req-3aea122b-cecc-4c1f-abf3-d7c127ca2ba0 req-e6ae5699-9b40-4295-8802-90586c864148 service nova] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1248.563552] env[68217]: DEBUG nova.compute.manager [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1248.575815] env[68217]: DEBUG nova.network.neutron [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Successfully created port: 4981c3d0-c0c4-444d-9992-6f6f173aedd2 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1248.597607] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Creating linked-clone VM from snapshot {{(pid=68217) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 1248.598665] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-13b2a0a0-7d7a-4a9e-b238-9bfd022ca922 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.613557] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1248.613557] env[68217]: value = "task-2962244" [ 1248.613557] env[68217]: _type = "Task" [ 1248.613557] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.620519] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962244, 'name': CloneVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.788579] env[68217]: DEBUG oslo_vmware.api [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962243, 'name': PowerOnVM_Task, 'duration_secs': 0.363577} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1248.788877] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1248.789080] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-0394794c-1da9-48b0-b56e-02119418fcde tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updating instance 'e11b2312-4cc2-4b49-bd26-22fd5629669d' progress to 100 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1249.015422] env[68217]: DEBUG nova.network.neutron [-] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1249.036456] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f4306bdb-18ce-4891-94ff-5c47a4e5fbc5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.050119] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c752a007-371d-400f-95a2-57472ee043a2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.095950] env[68217]: DEBUG nova.compute.manager [req-3aea122b-cecc-4c1f-abf3-d7c127ca2ba0 req-e6ae5699-9b40-4295-8802-90586c864148 service nova] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Detach interface failed, port_id=ab455e1f-1232-4fd4-a71b-b73ce15172ff, reason: Instance 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1249.121221] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962244, 'name': CloneVM_Task} progress is 94%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.518058] env[68217]: INFO nova.compute.manager [-] [instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Took 1.66 seconds to deallocate network for instance. [ 1249.576587] env[68217]: DEBUG nova.compute.manager [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1249.603572] env[68217]: DEBUG nova.virt.hardware [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1249.603877] env[68217]: DEBUG nova.virt.hardware [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1249.604087] env[68217]: DEBUG nova.virt.hardware [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1249.604318] env[68217]: DEBUG nova.virt.hardware [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1249.604507] env[68217]: DEBUG nova.virt.hardware [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1249.604696] env[68217]: DEBUG nova.virt.hardware [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1249.604940] env[68217]: DEBUG nova.virt.hardware [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1249.605138] env[68217]: DEBUG nova.virt.hardware [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Build topologies for 1 
vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1249.605354] env[68217]: DEBUG nova.virt.hardware [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1249.605565] env[68217]: DEBUG nova.virt.hardware [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1249.605823] env[68217]: DEBUG nova.virt.hardware [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1249.606874] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eb9cc5a-f769-4739-aeca-e699957a3dfb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.617842] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9390a0aa-485f-414f-92a7-f76e6025bda0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.627071] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962244, 'name': CloneVM_Task} progress is 95%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1250.024313] env[68217]: DEBUG oslo_concurrency.lockutils [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1250.024604] env[68217]: DEBUG oslo_concurrency.lockutils [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1250.024829] env[68217]: DEBUG nova.objects.instance [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lazy-loading 'resources' on Instance uuid 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1250.112608] env[68217]: DEBUG nova.network.neutron [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Successfully updated port: 4981c3d0-c0c4-444d-9992-6f6f173aedd2 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1250.124268] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962244, 'name': CloneVM_Task, 'duration_secs': 1.154355} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1250.125317] env[68217]: INFO nova.virt.vmwareapi.vmops [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Created linked-clone VM from snapshot [ 1250.126195] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f53c876-b093-4fa3-85de-0f9dfd9b8dae {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.134480] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Uploading image 1a1f3e1d-f17f-452a-bdff-e6a0645b108c {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 1250.154543] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 1250.154543] env[68217]: value = "vm-594426" [ 1250.154543] env[68217]: _type = "VirtualMachine" [ 1250.154543] env[68217]: }. 
{{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 1250.154783] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-a5fad96a-d16f-463d-ba26-66e9770b0ca4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.161187] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lease: (returnval){ [ 1250.161187] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529ddfe8-4ad0-9835-0002-12fc9bfada4f" [ 1250.161187] env[68217]: _type = "HttpNfcLease" [ 1250.161187] env[68217]: } obtained for exporting VM: (result){ [ 1250.161187] env[68217]: value = "vm-594426" [ 1250.161187] env[68217]: _type = "VirtualMachine" [ 1250.161187] env[68217]: }. {{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 1250.161464] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the lease: (returnval){ [ 1250.161464] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529ddfe8-4ad0-9835-0002-12fc9bfada4f" [ 1250.161464] env[68217]: _type = "HttpNfcLease" [ 1250.161464] env[68217]: } to be ready. {{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1250.167201] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1250.167201] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529ddfe8-4ad0-9835-0002-12fc9bfada4f" [ 1250.167201] env[68217]: _type = "HttpNfcLease" [ 1250.167201] env[68217]: } is initializing. 
{{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1250.558389] env[68217]: DEBUG nova.compute.manager [req-6952e35d-49d7-4094-9e88-bbfc652efbf7 req-60d095fd-9266-4fb5-baeb-afd5dd12b12b service nova] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Received event network-vif-plugged-4981c3d0-c0c4-444d-9992-6f6f173aedd2 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1250.558591] env[68217]: DEBUG oslo_concurrency.lockutils [req-6952e35d-49d7-4094-9e88-bbfc652efbf7 req-60d095fd-9266-4fb5-baeb-afd5dd12b12b service nova] Acquiring lock "945e4574-75b7-4ff7-8e0e-0fee0c90bef1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1250.558688] env[68217]: DEBUG oslo_concurrency.lockutils [req-6952e35d-49d7-4094-9e88-bbfc652efbf7 req-60d095fd-9266-4fb5-baeb-afd5dd12b12b service nova] Lock "945e4574-75b7-4ff7-8e0e-0fee0c90bef1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1250.558885] env[68217]: DEBUG oslo_concurrency.lockutils [req-6952e35d-49d7-4094-9e88-bbfc652efbf7 req-60d095fd-9266-4fb5-baeb-afd5dd12b12b service nova] Lock "945e4574-75b7-4ff7-8e0e-0fee0c90bef1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1250.559038] env[68217]: DEBUG nova.compute.manager [req-6952e35d-49d7-4094-9e88-bbfc652efbf7 req-60d095fd-9266-4fb5-baeb-afd5dd12b12b service nova] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] No waiting events found dispatching network-vif-plugged-4981c3d0-c0c4-444d-9992-6f6f173aedd2 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1250.559178] env[68217]: WARNING nova.compute.manager [req-6952e35d-49d7-4094-9e88-bbfc652efbf7 req-60d095fd-9266-4fb5-baeb-afd5dd12b12b service nova] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Received unexpected event network-vif-plugged-4981c3d0-c0c4-444d-9992-6f6f173aedd2 for instance with vm_state building and task_state spawning. [ 1250.559334] env[68217]: DEBUG nova.compute.manager [req-6952e35d-49d7-4094-9e88-bbfc652efbf7 req-60d095fd-9266-4fb5-baeb-afd5dd12b12b service nova] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Received event network-changed-4981c3d0-c0c4-444d-9992-6f6f173aedd2 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1250.559527] env[68217]: DEBUG nova.compute.manager [req-6952e35d-49d7-4094-9e88-bbfc652efbf7 req-60d095fd-9266-4fb5-baeb-afd5dd12b12b service nova] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Refreshing instance network info cache due to event network-changed-4981c3d0-c0c4-444d-9992-6f6f173aedd2. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1250.559654] env[68217]: DEBUG oslo_concurrency.lockutils [req-6952e35d-49d7-4094-9e88-bbfc652efbf7 req-60d095fd-9266-4fb5-baeb-afd5dd12b12b service nova] Acquiring lock "refresh_cache-945e4574-75b7-4ff7-8e0e-0fee0c90bef1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.559784] env[68217]: DEBUG oslo_concurrency.lockutils [req-6952e35d-49d7-4094-9e88-bbfc652efbf7 req-60d095fd-9266-4fb5-baeb-afd5dd12b12b service nova] Acquired lock "refresh_cache-945e4574-75b7-4ff7-8e0e-0fee0c90bef1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1250.559935] env[68217]: DEBUG nova.network.neutron [req-6952e35d-49d7-4094-9e88-bbfc652efbf7 req-60d095fd-9266-4fb5-baeb-afd5dd12b12b service nova] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Refreshing network info cache for port 4981c3d0-c0c4-444d-9992-6f6f173aedd2 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1250.615985] env[68217]: DEBUG oslo_concurrency.lockutils [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Acquiring lock "refresh_cache-945e4574-75b7-4ff7-8e0e-0fee0c90bef1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1250.667533] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93219a48-0f6b-494b-96d3-06dc5184ca39 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.675729] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3366f4db-74d0-4c58-8a32-cc800d5ce659 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.678772] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1250.678772] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529ddfe8-4ad0-9835-0002-12fc9bfada4f" [ 1250.678772] env[68217]: _type = "HttpNfcLease" [ 1250.678772] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1250.679129] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1250.679129] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529ddfe8-4ad0-9835-0002-12fc9bfada4f" [ 1250.679129] env[68217]: _type = "HttpNfcLease" [ 1250.679129] env[68217]: }. 
{{(pid=68217) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 1250.680137] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e69bf33-98dc-4e89-9045-116de1273623 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.706274] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1dc6c45-d66e-4e7e-9e92-11d1eb71de89 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.711312] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5204b062-584b-e043-ed78-5ffa8e679347/disk-0.vmdk from lease info. {{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1250.711524] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Opening URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5204b062-584b-e043-ed78-5ffa8e679347/disk-0.vmdk for reading. {{(pid=68217) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 1250.770434] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84654f27-d772-4699-aadb-9419fad87ab5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.785281] env[68217]: DEBUG nova.compute.provider_tree [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1250.803327] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-5685afa7-88ae-4a79-b8ff-1add54c4af85 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.095733] env[68217]: DEBUG nova.network.neutron [req-6952e35d-49d7-4094-9e88-bbfc652efbf7 req-60d095fd-9266-4fb5-baeb-afd5dd12b12b service nova] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1251.212800] env[68217]: DEBUG nova.network.neutron [req-6952e35d-49d7-4094-9e88-bbfc652efbf7 req-60d095fd-9266-4fb5-baeb-afd5dd12b12b service nova] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1251.321539] env[68217]: DEBUG nova.scheduler.client.report [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 172 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1251.321649] env[68217]: DEBUG nova.compute.provider_tree [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 172 to 173 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1251.321924] env[68217]: DEBUG nova.compute.provider_tree [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1251.606247] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "e11b2312-4cc2-4b49-bd26-22fd5629669d" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1251.606964] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "e11b2312-4cc2-4b49-bd26-22fd5629669d" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1251.606964] env[68217]: DEBUG nova.compute.manager [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Going 
to confirm migration 8 {{(pid=68217) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1251.715380] env[68217]: DEBUG oslo_concurrency.lockutils [req-6952e35d-49d7-4094-9e88-bbfc652efbf7 req-60d095fd-9266-4fb5-baeb-afd5dd12b12b service nova] Releasing lock "refresh_cache-945e4574-75b7-4ff7-8e0e-0fee0c90bef1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1251.716370] env[68217]: DEBUG oslo_concurrency.lockutils [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Acquired lock "refresh_cache-945e4574-75b7-4ff7-8e0e-0fee0c90bef1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1251.716748] env[68217]: DEBUG nova.network.neutron [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1251.827264] env[68217]: DEBUG oslo_concurrency.lockutils [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.802s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1251.848442] env[68217]: INFO nova.scheduler.client.report [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Deleted allocations for instance 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c [ 1252.184535] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "refresh_cache-e11b2312-4cc2-4b49-bd26-22fd5629669d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1252.184824] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquired lock "refresh_cache-e11b2312-4cc2-4b49-bd26-22fd5629669d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1252.185083] env[68217]: DEBUG nova.network.neutron [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1252.185309] env[68217]: DEBUG nova.objects.instance [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lazy-loading 'info_cache' on Instance uuid e11b2312-4cc2-4b49-bd26-22fd5629669d {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1252.251770] env[68217]: DEBUG nova.network.neutron [None req-86969085-8b08-4558-8585-17202487e5fc 
tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1252.357459] env[68217]: DEBUG oslo_concurrency.lockutils [None req-23100370-17e2-413c-b0e2-76459a7b4131 tempest-AttachVolumeNegativeTest-810049963 tempest-AttachVolumeNegativeTest-810049963-project-member] Lock "5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.103s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1252.420074] env[68217]: DEBUG nova.network.neutron [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Updating instance_info_cache with network_info: [{"id": "4981c3d0-c0c4-444d-9992-6f6f173aedd2", "address": "fa:16:3e:6f:4a:4e", "network": {"id": "aeff003a-cc1c-4c14-8d45-c09a8b424b6c", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-457348950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64cde75b201e4038b914350b250a8bd6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4981c3d0-c0", "ovs_interfaceid": "4981c3d0-c0c4-444d-9992-6f6f173aedd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1252.923387] env[68217]: DEBUG oslo_concurrency.lockutils [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Releasing lock "refresh_cache-945e4574-75b7-4ff7-8e0e-0fee0c90bef1" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1252.923757] env[68217]: DEBUG nova.compute.manager [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Instance network_info: |[{"id": "4981c3d0-c0c4-444d-9992-6f6f173aedd2", "address": "fa:16:3e:6f:4a:4e", "network": {"id": "aeff003a-cc1c-4c14-8d45-c09a8b424b6c", "bridge": "br-int", "label": "tempest-InstanceActionsNegativeTestJSON-457348950-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "64cde75b201e4038b914350b250a8bd6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "43ad01d2-c7dd-453c-a929-8ad76294d13c", "external-id": "nsx-vlan-transportzone-176", "segmentation_id": 176, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4981c3d0-c0", "ovs_interfaceid": "4981c3d0-c0c4-444d-9992-6f6f173aedd2", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1252.924167] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:4a:4e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '43ad01d2-c7dd-453c-a929-8ad76294d13c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4981c3d0-c0c4-444d-9992-6f6f173aedd2', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1252.931944] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Creating folder: Project (64cde75b201e4038b914350b250a8bd6). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1252.932274] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4658ed5e-6f90-47e4-99f2-ba68d76f458a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.943254] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Created folder: Project (64cde75b201e4038b914350b250a8bd6) in parent group-v594094. [ 1252.943457] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Creating folder: Instances. Parent ref: group-v594427. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1252.943699] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-20ea2454-2e78-49a7-9b12-17402c15f384 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.952561] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Created folder: Instances in parent group-v594427. 
[ 1252.952776] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1252.952992] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1252.953228] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8484addc-3a45-40ff-88ed-3227a03477b0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.971713] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1252.971713] env[68217]: value = "task-2962249" [ 1252.971713] env[68217]: _type = "Task" [ 1252.971713] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1252.979304] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962249, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.407605] env[68217]: DEBUG nova.network.neutron [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updating instance_info_cache with network_info: [{"id": "0680c857-993e-4996-8477-cfaa0a5727ad", "address": "fa:16:3e:9d:49:70", "network": {"id": "4150000f-0e86-4f04-b081-553b2ab3b937", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-262407843-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.205", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d093c295105c44cca8bd67bd514429d1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "685b4083-b748-41fb-a68a-273b1073fa28", "external-id": "nsx-vlan-transportzone-312", "segmentation_id": 312, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0680c857-99", "ovs_interfaceid": "0680c857-993e-4996-8477-cfaa0a5727ad", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.482028] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962249, 'name': CreateVM_Task} progress is 25%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1253.911088] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Releasing lock "refresh_cache-e11b2312-4cc2-4b49-bd26-22fd5629669d" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1253.911404] env[68217]: DEBUG nova.objects.instance [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lazy-loading 'migration_context' on Instance uuid e11b2312-4cc2-4b49-bd26-22fd5629669d {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1253.986908] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962249, 'name': CreateVM_Task} progress is 25%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.415053] env[68217]: DEBUG nova.objects.base [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1254.415880] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-496d8a01-20c5-4846-92d2-4b86030fc6b7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.435374] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8bb5a071-ed51-42f7-9848-66e84f47f731 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.440778] env[68217]: DEBUG oslo_vmware.api [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1254.440778] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]522dd75f-cff8-972a-7694-e7989ac24b08" [ 1254.440778] env[68217]: _type = "Task" [ 1254.440778] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.448088] env[68217]: DEBUG oslo_vmware.api [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522dd75f-cff8-972a-7694-e7989ac24b08, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.482157] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962249, 'name': CreateVM_Task, 'duration_secs': 1.192109} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.482311] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1254.482957] env[68217]: DEBUG oslo_concurrency.lockutils [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.483130] env[68217]: DEBUG oslo_concurrency.lockutils [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1254.483441] env[68217]: DEBUG oslo_concurrency.lockutils [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1254.483699] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3710b07-c5d3-4c75-9809-069b1c624ae1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.487793] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Waiting for the task: (returnval){ [ 1254.487793] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52abd157-3f8d-fc8c-7e47-a5abea190391" [ 1254.487793] env[68217]: _type = "Task" [ 1254.487793] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.495145] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52abd157-3f8d-fc8c-7e47-a5abea190391, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.950564] env[68217]: DEBUG oslo_vmware.api [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]522dd75f-cff8-972a-7694-e7989ac24b08, 'name': SearchDatastore_Task, 'duration_secs': 0.022795} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.950919] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1254.951188] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1254.997934] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52abd157-3f8d-fc8c-7e47-a5abea190391, 'name': SearchDatastore_Task, 'duration_secs': 0.028851} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.998246] env[68217]: DEBUG oslo_concurrency.lockutils [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1254.998534] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1254.998766] env[68217]: DEBUG oslo_concurrency.lockutils [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1254.998910] env[68217]: DEBUG oslo_concurrency.lockutils [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1254.999101] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1254.999360] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aad835ea-2ebc-4dbc-abf1-0f7c65279760 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.008536] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1255.008677] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1255.009422] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1600de52-6e88-4a68-a6ee-8fc27229bc95 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.014479] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Waiting for the task: (returnval){ [ 1255.014479] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52905b59-e315-415c-cf26-aacd4ec3ea94" [ 1255.014479] env[68217]: _type = "Task" [ 1255.014479] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.022086] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52905b59-e315-415c-cf26-aacd4ec3ea94, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.526918] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52905b59-e315-415c-cf26-aacd4ec3ea94, 'name': SearchDatastore_Task, 'duration_secs': 0.011078} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1255.527815] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-795396ae-78ad-4cee-873d-e5e824b2f874 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.535219] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Waiting for the task: (returnval){ [ 1255.535219] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]526799c5-6b6d-6c10-5654-e9d65beb757a" [ 1255.535219] env[68217]: _type = "Task" [ 1255.535219] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1255.543970] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526799c5-6b6d-6c10-5654-e9d65beb757a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1255.587357] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ed78a7-8dbf-4567-8edc-8b6a43a06705 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.595988] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6fc08e-1961-4953-9b7d-0c4690fb2606 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.636169] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ecd21f1-5404-4035-a83b-999242629af9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.643797] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c305b0a7-ceb0-4a63-8057-60fbb7a613b2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1255.656984] env[68217]: DEBUG nova.compute.provider_tree [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1255.950107] env[68217]: DEBUG oslo_concurrency.lockutils [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "9844e40f-29ed-48b9-a48f-85fbe10ae2fb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1255.950107] env[68217]: DEBUG oslo_concurrency.lockutils [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "9844e40f-29ed-48b9-a48f-85fbe10ae2fb" 
acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1255.950107] env[68217]: DEBUG oslo_concurrency.lockutils [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "9844e40f-29ed-48b9-a48f-85fbe10ae2fb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1255.950310] env[68217]: DEBUG oslo_concurrency.lockutils [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "9844e40f-29ed-48b9-a48f-85fbe10ae2fb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1255.950488] env[68217]: DEBUG oslo_concurrency.lockutils [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "9844e40f-29ed-48b9-a48f-85fbe10ae2fb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1255.953243] env[68217]: INFO nova.compute.manager [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Terminating instance [ 1256.046365] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]526799c5-6b6d-6c10-5654-e9d65beb757a, 'name': SearchDatastore_Task, 'duration_secs': 0.025015} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.046751] env[68217]: DEBUG oslo_concurrency.lockutils [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1256.047011] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 945e4574-75b7-4ff7-8e0e-0fee0c90bef1/945e4574-75b7-4ff7-8e0e-0fee0c90bef1.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1256.047357] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-86f5d26a-c767-4994-bf25-772b2e334634 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.054029] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Waiting for the task: (returnval){ [ 1256.054029] env[68217]: value = "task-2962251" [ 1256.054029] env[68217]: _type = "Task" [ 1256.054029] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.062209] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': task-2962251, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.160141] env[68217]: DEBUG nova.scheduler.client.report [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1256.457302] env[68217]: DEBUG nova.compute.manager [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1256.457521] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1256.458383] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bb73f53-7d0b-41cf-a5f1-99f0ee5f0ae0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.466073] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1256.466356] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ff87dbbf-66c5-450d-91bf-69f12f426eda {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1256.473086] env[68217]: DEBUG oslo_vmware.api [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1256.473086] env[68217]: value = "task-2962252" [ 1256.473086] env[68217]: _type = "Task" [ 1256.473086] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1256.482084] env[68217]: DEBUG oslo_vmware.api [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962252, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.565947] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': task-2962251, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1256.983163] env[68217]: DEBUG oslo_vmware.api [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962252, 'name': PowerOffVM_Task, 'duration_secs': 0.250705} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1256.983424] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1256.983584] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1256.983871] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ccfa6242-a765-444a-b78e-4b63e7d5d203 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.044480] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1257.044735] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1257.044839] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Deleting the datastore file [datastore2] 9844e40f-29ed-48b9-a48f-85fbe10ae2fb {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1257.045118] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3813ec83-7f84-4346-bbc3-d02efa76ed8e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.052522] env[68217]: DEBUG oslo_vmware.api [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1257.052522] env[68217]: value = "task-2962254" [ 1257.052522] env[68217]: _type = "Task" [ 1257.052522] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.064359] env[68217]: DEBUG oslo_vmware.api [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962254, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.067371] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': task-2962251, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.592188} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.067612] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 945e4574-75b7-4ff7-8e0e-0fee0c90bef1/945e4574-75b7-4ff7-8e0e-0fee0c90bef1.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1257.067850] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1257.068108] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-92c83e71-9083-4c91-8d71-ed56d057dd2d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.073699] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Waiting for the task: (returnval){ [ 1257.073699] env[68217]: value = "task-2962255" [ 1257.073699] env[68217]: _type = "Task" [ 1257.073699] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.081564] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': task-2962255, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.174446] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.223s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1257.562681] env[68217]: DEBUG oslo_vmware.api [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962254, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.206311} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.562872] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1257.563074] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1257.563256] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1257.563431] env[68217]: INFO nova.compute.manager [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1257.563676] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1257.563873] env[68217]: DEBUG nova.compute.manager [-] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1257.563971] env[68217]: DEBUG nova.network.neutron [-] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1257.583019] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': task-2962255, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075811} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1257.583282] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1257.584045] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e04fe6-0970-4b65-a267-b26b3ea10bc2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.606132] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Reconfiguring VM instance instance-0000007c to attach disk [datastore2] 945e4574-75b7-4ff7-8e0e-0fee0c90bef1/945e4574-75b7-4ff7-8e0e-0fee0c90bef1.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1257.608563] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6e47a16f-4aca-4882-af42-983d16785645 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1257.628357] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Waiting for the task: (returnval){ [ 1257.628357] env[68217]: value = "task-2962256" [ 1257.628357] env[68217]: _type = "Task" [ 1257.628357] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1257.636152] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': task-2962256, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1257.741245] env[68217]: INFO nova.scheduler.client.report [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleted allocation for migration 920b0cb6-9983-47f6-9c8e-3515082d2210 [ 1258.137859] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': task-2962256, 'name': ReconfigVM_Task, 'duration_secs': 0.441934} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.138232] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Reconfigured VM instance instance-0000007c to attach disk [datastore2] 945e4574-75b7-4ff7-8e0e-0fee0c90bef1/945e4574-75b7-4ff7-8e0e-0fee0c90bef1.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1258.138783] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-84e52fee-68b2-42e4-879d-3a4fa362fc4d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.144551] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Waiting for the task: (returnval){ [ 1258.144551] env[68217]: value = "task-2962257" [ 1258.144551] env[68217]: _type = "Task" [ 1258.144551] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.153953] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': task-2962257, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1258.250101] env[68217]: DEBUG oslo_concurrency.lockutils [None req-5fe2a3e6-b78f-4c1d-a12d-920f09948c9b tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "e11b2312-4cc2-4b49-bd26-22fd5629669d" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.643s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1258.425547] env[68217]: INFO nova.compute.manager [None req-e1d624de-5b65-4cbf-92d9-125389560961 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Get console output [ 1258.425919] env[68217]: WARNING nova.virt.vmwareapi.driver [None req-e1d624de-5b65-4cbf-92d9-125389560961 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] The console log is missing. 
Check your VSPC configuration [ 1258.525119] env[68217]: DEBUG nova.compute.manager [req-f5d47c12-ac27-47c0-b9ad-5c1f2e0fb064 req-7f6c12a8-c3e2-4785-9279-0903124dc7e4 service nova] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Received event network-vif-deleted-4873ab51-6a06-44e0-a653-3dfbaa42a0d1 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1258.525391] env[68217]: INFO nova.compute.manager [req-f5d47c12-ac27-47c0-b9ad-5c1f2e0fb064 req-7f6c12a8-c3e2-4785-9279-0903124dc7e4 service nova] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Neutron deleted interface 4873ab51-6a06-44e0-a653-3dfbaa42a0d1; detaching it from the instance and deleting it from the info cache [ 1258.525529] env[68217]: DEBUG nova.network.neutron [req-f5d47c12-ac27-47c0-b9ad-5c1f2e0fb064 req-7f6c12a8-c3e2-4785-9279-0903124dc7e4 service nova] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1258.655018] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': task-2962257, 'name': Rename_Task, 'duration_secs': 0.215993} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1258.655347] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1258.655603] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-78a6848f-144b-4feb-ab32-2d441108b195 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1258.662282] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Waiting for the task: (returnval){ [ 1258.662282] env[68217]: value = "task-2962258" [ 1258.662282] env[68217]: _type = "Task" [ 1258.662282] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1258.670724] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': task-2962258, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.008242] env[68217]: DEBUG nova.network.neutron [-] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1259.028169] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-73971b46-3395-4eba-8935-6f7b5e70d730 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.038568] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c47c1e7f-2b3e-43d8-86b9-f9d63fa66e3f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.071488] env[68217]: DEBUG nova.compute.manager [req-f5d47c12-ac27-47c0-b9ad-5c1f2e0fb064 req-7f6c12a8-c3e2-4785-9279-0903124dc7e4 service nova] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Detach interface failed, port_id=4873ab51-6a06-44e0-a653-3dfbaa42a0d1, reason: Instance 9844e40f-29ed-48b9-a48f-85fbe10ae2fb could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1259.177509] env[68217]: DEBUG oslo_vmware.api [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': task-2962258, 'name': PowerOnVM_Task, 'duration_secs': 0.428666} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.177509] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1259.177509] env[68217]: INFO nova.compute.manager [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Took 9.60 seconds to spawn the instance on the hypervisor. [ 1259.177509] env[68217]: DEBUG nova.compute.manager [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1259.177509] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1235cd10-b91f-45fd-85fb-4345792d57f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.241475] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5204b062-584b-e043-ed78-5ffa8e679347/disk-0.vmdk. 
{{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1259.242688] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c26d98a-54a2-4ae6-91ca-c4a9a74bbaf3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.248871] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5204b062-584b-e043-ed78-5ffa8e679347/disk-0.vmdk is in state: ready. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1259.249043] env[68217]: ERROR oslo_vmware.rw_handles [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Aborting lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5204b062-584b-e043-ed78-5ffa8e679347/disk-0.vmdk due to incomplete transfer. [ 1259.249262] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-35545346-2efe-45a1-8886-b61bd204c9b0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.256466] env[68217]: DEBUG oslo_vmware.rw_handles [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Closed VMDK read handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/5204b062-584b-e043-ed78-5ffa8e679347/disk-0.vmdk. {{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 1259.256654] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Uploaded image 1a1f3e1d-f17f-452a-bdff-e6a0645b108c to the Glance image server {{(pid=68217) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 1259.259019] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Destroying the VM {{(pid=68217) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 1259.259527] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-7fae907b-f7df-4955-923e-9867137fdf64 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.265261] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1259.265261] env[68217]: value = "task-2962260" [ 1259.265261] env[68217]: _type = "Task" [ 1259.265261] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.275066] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962260, 'name': Destroy_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1259.511641] env[68217]: INFO nova.compute.manager [-] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Took 1.95 seconds to deallocate network for instance. [ 1259.694768] env[68217]: INFO nova.compute.manager [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Took 15.15 seconds to build instance. [ 1259.774848] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962260, 'name': Destroy_Task, 'duration_secs': 0.304753} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1259.775086] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Destroyed the VM [ 1259.775336] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Deleting Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 1259.775594] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-4374c630-737e-47c7-9ff5-446aaaffe683 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1259.781614] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1259.781614] env[68217]: value = "task-2962261" [ 1259.781614] env[68217]: _type = "Task" [ 1259.781614] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1259.789250] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962261, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.018308] env[68217]: DEBUG oslo_concurrency.lockutils [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.018706] env[68217]: DEBUG oslo_concurrency.lockutils [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.018962] env[68217]: DEBUG nova.objects.instance [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lazy-loading 'resources' on Instance uuid 9844e40f-29ed-48b9-a48f-85fbe10ae2fb {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1260.104261] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Acquiring lock "945e4574-75b7-4ff7-8e0e-0fee0c90bef1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.196925] env[68217]: DEBUG oslo_concurrency.lockutils [None req-86969085-8b08-4558-8585-17202487e5fc tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Lock "945e4574-75b7-4ff7-8e0e-0fee0c90bef1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.662s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.197235] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Lock "945e4574-75b7-4ff7-8e0e-0fee0c90bef1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.093s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.197440] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Acquiring lock "945e4574-75b7-4ff7-8e0e-0fee0c90bef1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1260.197644] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Lock "945e4574-75b7-4ff7-8e0e-0fee0c90bef1-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1260.197812] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Lock "945e4574-75b7-4ff7-8e0e-0fee0c90bef1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1260.200059] env[68217]: INFO nova.compute.manager [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Terminating instance [ 1260.291495] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962261, 'name': RemoveSnapshot_Task, 'duration_secs': 0.356517} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1260.291754] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Deleted Snapshot of the VM instance {{(pid=68217) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 1260.292035] env[68217]: DEBUG nova.compute.manager [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1260.292771] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2dfa393-ea73-437d-acaf-63b58cca3d8e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.617772] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16b48b6f-a6ad-4957-b8fb-afa816b3db99 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.625640] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f3a52f-03f8-44b1-a28c-27141a041521 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.657305] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11425add-ec74-4c30-8eb5-3db98bee9074 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.665034] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e264788-c712-4057-830a-108d16f14feb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.677595] env[68217]: DEBUG nova.compute.provider_tree [None 
req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1260.704061] env[68217]: DEBUG nova.compute.manager [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1260.704296] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1260.705381] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e925ea5b-aa23-49c3-8a75-f26be763db1a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.712838] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1260.713108] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e6a4f164-6f79-4a19-8b2e-24d3048d1b8e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1260.720224] env[68217]: DEBUG oslo_vmware.api [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Waiting for the task: (returnval){ [ 1260.720224] env[68217]: value = "task-2962262" [ 1260.720224] env[68217]: _type = "Task" [ 1260.720224] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1260.728652] env[68217]: DEBUG oslo_vmware.api [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': task-2962262, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1260.804837] env[68217]: INFO nova.compute.manager [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Shelve offloading [ 1261.197950] env[68217]: ERROR nova.scheduler.client.report [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [req-dcb8dbd0-9520-4bf3-a1d6-593ccfcd28f4] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-dcb8dbd0-9520-4bf3-a1d6-593ccfcd28f4"}]} [ 1261.213642] env[68217]: DEBUG nova.scheduler.client.report [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1261.229905] env[68217]: DEBUG oslo_vmware.api [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': task-2962262, 'name': PowerOffVM_Task, 'duration_secs': 0.186372} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.230757] env[68217]: DEBUG nova.scheduler.client.report [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1261.230961] env[68217]: DEBUG nova.compute.provider_tree [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1261.232858] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1261.233048] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1261.233475] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f9468eac-7629-41de-b1e4-becaf21dcfe3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.244383] env[68217]: DEBUG nova.scheduler.client.report [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1261.262121] env[68217]: DEBUG nova.scheduler.client.report [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) _refresh_associations 
/opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1261.290840] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1261.291129] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1261.291323] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Deleting the datastore file [datastore2] 945e4574-75b7-4ff7-8e0e-0fee0c90bef1 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1261.291580] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d31171bc-7292-41c2-916b-ba1a12e96e40 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.299068] env[68217]: DEBUG oslo_vmware.api [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Waiting for the task: (returnval){ [ 1261.299068] env[68217]: value = "task-2962264" [ 1261.299068] env[68217]: _type = "Task" [ 1261.299068] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.310458] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1261.310738] env[68217]: DEBUG oslo_vmware.api [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': task-2962264, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.313172] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cac9dfbf-ce00-4ec2-8caf-330321ecd467 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.318757] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1261.318757] env[68217]: value = "task-2962265" [ 1261.318757] env[68217]: _type = "Task" [ 1261.318757] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1261.325930] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962265, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1261.371022] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47c0ca4-443a-489c-8e16-00d8eb13b665 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.376313] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30116f3c-aec2-4ee9-b54c-98d70df108a7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.407066] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23112d50-880c-437a-a66f-5963a94f72c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.414123] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ea24129-28ea-428d-a532-fe327d53e659 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.427491] env[68217]: DEBUG nova.compute.provider_tree [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1261.811173] env[68217]: DEBUG oslo_vmware.api [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Task: {'id': task-2962264, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.127198} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1261.811464] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1261.811652] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1261.811828] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1261.811998] env[68217]: INFO nova.compute.manager [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1261.812246] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1261.812441] env[68217]: DEBUG nova.compute.manager [-] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1261.812546] env[68217]: DEBUG nova.network.neutron [-] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1261.827717] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] VM already powered off {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1261.827920] env[68217]: DEBUG nova.compute.manager [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1261.828683] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b44179ad-138a-47d5-8a33-1ac026f9281f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1261.834050] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1261.834220] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1261.834383] env[68217]: DEBUG nova.network.neutron [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1261.961305] env[68217]: DEBUG nova.scheduler.client.report [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 174 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1261.961570] env[68217]: DEBUG nova.compute.provider_tree [None 
req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 174 to 175 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1261.961750] env[68217]: DEBUG nova.compute.provider_tree [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1262.083367] env[68217]: DEBUG nova.compute.manager [req-9c8958d0-c710-45f2-ae38-23a9df52497a req-47847ea1-965b-47a4-ac3b-95392aea8ce9 service nova] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Received event network-vif-deleted-4981c3d0-c0c4-444d-9992-6f6f173aedd2 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1262.083574] env[68217]: INFO nova.compute.manager [req-9c8958d0-c710-45f2-ae38-23a9df52497a req-47847ea1-965b-47a4-ac3b-95392aea8ce9 service nova] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Neutron deleted interface 4981c3d0-c0c4-444d-9992-6f6f173aedd2; detaching it from the instance and deleting it from the info cache [ 1262.083848] env[68217]: DEBUG nova.network.neutron [req-9c8958d0-c710-45f2-ae38-23a9df52497a req-47847ea1-965b-47a4-ac3b-95392aea8ce9 service nova] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.466981] env[68217]: DEBUG oslo_concurrency.lockutils [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.448s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1262.486468] env[68217]: INFO nova.scheduler.client.report [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Deleted allocations for instance 9844e40f-29ed-48b9-a48f-85fbe10ae2fb [ 1262.559545] env[68217]: DEBUG nova.network.neutron [-] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.589270] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-76f0c123-6410-49e4-bd30-9797385e9b66 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.609048] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61854a04-0f2e-4b55-9950-dcd1d2214711 {{(pid=68217) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1262.636259] env[68217]: DEBUG nova.compute.manager [req-9c8958d0-c710-45f2-ae38-23a9df52497a req-47847ea1-965b-47a4-ac3b-95392aea8ce9 service nova] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Detach interface failed, port_id=4981c3d0-c0c4-444d-9992-6f6f173aedd2, reason: Instance 945e4574-75b7-4ff7-8e0e-0fee0c90bef1 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1262.691520] env[68217]: DEBUG nova.network.neutron [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Updating instance_info_cache with network_info: [{"id": "15c3165f-90ba-4321-8d1f-aea389ccc77a", "address": "fa:16:3e:ea:06:46", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15c3165f-90", "ovs_interfaceid": "15c3165f-90ba-4321-8d1f-aea389ccc77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1262.994268] env[68217]: DEBUG oslo_concurrency.lockutils [None req-57a5f94a-85d9-48a7-a995-46255207c0cd tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "9844e40f-29ed-48b9-a48f-85fbe10ae2fb" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.044s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1263.062467] env[68217]: INFO nova.compute.manager [-] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Took 1.25 seconds to deallocate network for instance.
[ 1263.196672] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1263.568890] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1263.570092] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1263.570570] env[68217]: DEBUG nova.objects.instance [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Lazy-loading 'resources' on Instance uuid 945e4574-75b7-4ff7-8e0e-0fee0c90bef1 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1263.691141] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1263.691141] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aee7837a-5758-441f-b73c-6fd75c4dac39 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.699149] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1263.699149] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f52e7aa0-c5c3-4bb4-bec8-18c809cec73a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.776472] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1263.776707] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Deleting contents of the VM from datastore datastore1 
{{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1263.776849] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Deleting the datastore file [datastore1] b5fdce0e-465a-4cf0-9a15-313bba7a11e9 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1263.778397] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8249ed9d-42b6-490f-b485-c54d463301c7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1263.785455] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1263.785455] env[68217]: value = "task-2962267" [ 1263.785455] env[68217]: _type = "Task" [ 1263.785455] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1263.795636] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962267, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1264.134528] env[68217]: DEBUG nova.compute.manager [req-65068d35-5aca-4e52-abc9-37f7655ee83d req-4d527d4a-fd54-4ec1-ae22-3c3df4dead6a service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Received event network-vif-unplugged-15c3165f-90ba-4321-8d1f-aea389ccc77a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1264.134528] env[68217]: DEBUG oslo_concurrency.lockutils [req-65068d35-5aca-4e52-abc9-37f7655ee83d req-4d527d4a-fd54-4ec1-ae22-3c3df4dead6a service nova] Acquiring lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1264.134528] env[68217]: DEBUG oslo_concurrency.lockutils [req-65068d35-5aca-4e52-abc9-37f7655ee83d req-4d527d4a-fd54-4ec1-ae22-3c3df4dead6a service nova] Lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1264.134528] env[68217]: DEBUG oslo_concurrency.lockutils [req-65068d35-5aca-4e52-abc9-37f7655ee83d req-4d527d4a-fd54-4ec1-ae22-3c3df4dead6a service nova] Lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1264.134528] env[68217]: DEBUG nova.compute.manager [req-65068d35-5aca-4e52-abc9-37f7655ee83d req-4d527d4a-fd54-4ec1-ae22-3c3df4dead6a service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] No waiting events found dispatching network-vif-unplugged-15c3165f-90ba-4321-8d1f-aea389ccc77a {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1264.134528] env[68217]:
WARNING nova.compute.manager [req-65068d35-5aca-4e52-abc9-37f7655ee83d req-4d527d4a-fd54-4ec1-ae22-3c3df4dead6a service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Received unexpected event network-vif-unplugged-15c3165f-90ba-4321-8d1f-aea389ccc77a for instance with vm_state shelved and task_state shelving_offloading. [ 1264.134528] env[68217]: DEBUG nova.compute.manager [req-65068d35-5aca-4e52-abc9-37f7655ee83d req-4d527d4a-fd54-4ec1-ae22-3c3df4dead6a service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Received event network-changed-15c3165f-90ba-4321-8d1f-aea389ccc77a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1264.134528] env[68217]: DEBUG nova.compute.manager [req-65068d35-5aca-4e52-abc9-37f7655ee83d req-4d527d4a-fd54-4ec1-ae22-3c3df4dead6a service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Refreshing instance network info cache due to event network-changed-15c3165f-90ba-4321-8d1f-aea389ccc77a. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1264.134528] env[68217]: DEBUG oslo_concurrency.lockutils [req-65068d35-5aca-4e52-abc9-37f7655ee83d req-4d527d4a-fd54-4ec1-ae22-3c3df4dead6a service nova] Acquiring lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1264.134528] env[68217]: DEBUG oslo_concurrency.lockutils [req-65068d35-5aca-4e52-abc9-37f7655ee83d req-4d527d4a-fd54-4ec1-ae22-3c3df4dead6a service nova] Acquired lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1264.135051] env[68217]: DEBUG nova.network.neutron [req-65068d35-5aca-4e52-abc9-37f7655ee83d req-4d527d4a-fd54-4ec1-ae22-3c3df4dead6a service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Refreshing network info cache for port 15c3165f-90ba-4321-8d1f-aea389ccc77a {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1264.182504] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e49be13-96f4-4ccc-a9aa-e86c2808fde8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.190431] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc844c38-b177-4bfb-9cce-b063b565ab0b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.222724] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45f72072-dcf7-4079-8252-dfaf25797eee {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.230490] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bec6343-6110-43a2-ad57-99a70232e71d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.244696] env[68217]: DEBUG nova.compute.provider_tree [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1264.295412] env[68217]: DEBUG oslo_vmware.api [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962267, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.296298} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1264.296123] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1264.296123] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1264.296123] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1264.313445] env[68217]: INFO nova.scheduler.client.report [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Deleted allocations for instance b5fdce0e-465a-4cf0-9a15-313bba7a11e9 [ 1264.767618] env[68217]: ERROR nova.scheduler.client.report [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] [req-c8b53d68-b9c4-4eca-a4f8-9a6c3b952b34] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID 42aedcce-ee61-45e1-bf10-c06056d1f548. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c8b53d68-b9c4-4eca-a4f8-9a6c3b952b34"}]} [ 1264.787126] env[68217]: DEBUG nova.scheduler.client.report [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Refreshing inventories for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:822}} [ 1264.800922] env[68217]: DEBUG nova.scheduler.client.report [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Updating ProviderTree inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:786}} [ 1264.801207] env[68217]: DEBUG nova.compute.provider_tree [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 153, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1264.812431] env[68217]: DEBUG nova.scheduler.client.report [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Refreshing aggregate associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, aggregates: None {{(pid=68217) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:831}} [ 1264.818188] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1264.831903] env[68217]: DEBUG nova.scheduler.client.report [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Refreshing trait associations for resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548, traits: COMPUTE_IMAGE_TYPE_ISO,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_NODE,HW_ARCH_X86_64,COMPUTE_IMAGE_TYPE_VMDK {{(pid=68217) 
_refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:843}} [ 1264.849867] env[68217]: DEBUG nova.network.neutron [req-65068d35-5aca-4e52-abc9-37f7655ee83d req-4d527d4a-fd54-4ec1-ae22-3c3df4dead6a service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Updated VIF entry in instance network info cache for port 15c3165f-90ba-4321-8d1f-aea389ccc77a. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1264.850218] env[68217]: DEBUG nova.network.neutron [req-65068d35-5aca-4e52-abc9-37f7655ee83d req-4d527d4a-fd54-4ec1-ae22-3c3df4dead6a service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Updating instance_info_cache with network_info: [{"id": "15c3165f-90ba-4321-8d1f-aea389ccc77a", "address": "fa:16:3e:ea:06:46", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": null, "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap15c3165f-90", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1264.918894] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9db98ae0-572a-444d-a0e0-be195f251669 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.926126] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-592574d1-a3c0-4f41-b7a2-67aecf305041 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.957468] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14616f84-deab-42ec-8a6a-05a206431b65 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.964633] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd4ea60d-9752-4e60-a8aa-505b47b048c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1264.977727] env[68217]: DEBUG nova.compute.provider_tree [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 
'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1265.212841] env[68217]: DEBUG nova.compute.manager [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Stashing vm_state: active {{(pid=68217) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1265.352533] env[68217]: DEBUG oslo_concurrency.lockutils [req-65068d35-5aca-4e52-abc9-37f7655ee83d req-4d527d4a-fd54-4ec1-ae22-3c3df4dead6a service nova] Releasing lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1265.509215] env[68217]: DEBUG nova.scheduler.client.report [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Updated inventory for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with generation 177 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:975}} [ 1265.509482] env[68217]: DEBUG nova.compute.provider_tree [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Updating resource provider 42aedcce-ee61-45e1-bf10-c06056d1f548 generation from 177 to 178 during operation: update_inventory {{(pid=68217) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1265.509682] env[68217]: DEBUG nova.compute.provider_tree [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Updating inventory in ProviderTree for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1265.730407] env[68217]: DEBUG oslo_concurrency.lockutils [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1266.014141] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 
2.445s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1266.016374] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.198s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1266.016730] env[68217]: DEBUG nova.objects.instance [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lazy-loading 'resources' on Instance uuid b5fdce0e-465a-4cf0-9a15-313bba7a11e9 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1266.032803] env[68217]: INFO nova.scheduler.client.report [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Deleted allocations for instance 945e4574-75b7-4ff7-8e0e-0fee0c90bef1 [ 1266.474659] env[68217]: DEBUG oslo_concurrency.lockutils [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9" by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1266.520768] env[68217]: DEBUG nova.objects.instance [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lazy-loading 'numa_topology' on Instance uuid b5fdce0e-465a-4cf0-9a15-313bba7a11e9 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1266.539848] env[68217]: DEBUG oslo_concurrency.lockutils [None req-f07c63e5-ad7f-4874-a1e3-d8b10e19951e tempest-InstanceActionsNegativeTestJSON-1062940986 tempest-InstanceActionsNegativeTestJSON-1062940986-project-member] Lock "945e4574-75b7-4ff7-8e0e-0fee0c90bef1" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 6.342s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1267.026826] env[68217]: DEBUG nova.objects.base [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Object Instance lazy-loaded attributes: resources,numa_topology {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1267.133022] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4873c6-bb4d-4da3-aa1c-ac0e314c0fd3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.139617] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-219d6277-36b4-4f10-92bd-4e6c1a14c2ef {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.174705] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d65335-7631-4078-bcd4-63779e8e8ee4 {{(pid=68217)
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.184691] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e933aa1-8021-4cd1-b322-20a846117ea4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1267.209556] env[68217]: DEBUG nova.compute.provider_tree [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1267.711314] env[68217]: DEBUG nova.scheduler.client.report [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1268.216299] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.200s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1268.218817] env[68217]: DEBUG oslo_concurrency.lockutils [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 2.489s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1268.724886] env[68217]: INFO nova.compute.claims [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1268.729866] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e5cf44eb-03d1-4aae-b4a4-10fd7c47e082 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9" "released" by "nova.compute.manager.ComputeManager.shelve_instance.<locals>.do_shelve_instance" :: held 23.243s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1268.730647] env[68217]: DEBUG oslo_concurrency.lockutils [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9" acquired by "nova.compute.manager.ComputeManager.unshelve_instance.<locals>.do_unshelve_instance" :: waited 2.256s {{(pid=68217) inner
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1268.730823] env[68217]: INFO nova.compute.manager [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Unshelving [ 1269.232175] env[68217]: INFO nova.compute.resource_tracker [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating resource usage from migration eba49941-ecfd-44d1-bfe9-04f171b1fe1c [ 1269.320308] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4d62574-1a8e-437e-a7e8-84889efe2ca7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.327860] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-851dbf2b-3a22-4d0f-9472-d71806e51dd6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.356832] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07a4b77b-0ff2-4a5b-a536-aaeee47bf002 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.363347] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33a3ed17-5c2c-4dc9-bd50-94f73efc70e2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1269.376148] env[68217]: DEBUG nova.compute.provider_tree [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1269.755431] env[68217]: DEBUG oslo_concurrency.lockutils [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1269.878762] env[68217]: DEBUG nova.scheduler.client.report [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1270.383891] env[68217]: DEBUG oslo_concurrency.lockutils [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" "released" 
by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.165s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1270.384270] env[68217]: INFO nova.compute.manager [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Migrating [ 1270.390504] env[68217]: DEBUG oslo_concurrency.lockutils [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.635s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1270.390653] env[68217]: DEBUG nova.objects.instance [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lazy-loading 'pci_requests' on Instance uuid b5fdce0e-465a-4cf0-9a15-313bba7a11e9 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1270.900379] env[68217]: DEBUG nova.objects.instance [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lazy-loading 'numa_topology' on Instance uuid b5fdce0e-465a-4cf0-9a15-313bba7a11e9 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1270.901926] env[68217]: DEBUG oslo_concurrency.lockutils [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1270.902168] env[68217]: DEBUG oslo_concurrency.lockutils [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1270.902356] env[68217]: DEBUG nova.network.neutron [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1271.404803] env[68217]: INFO nova.compute.claims [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1271.607458] env[68217]: DEBUG nova.network.neutron [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating instance_info_cache with network_info: [{"id": "7ca41605-8ab9-4d01-835b-70d47e78fce9", "address": "fa:16:3e:26:5b:d2", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", 
"subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ca41605-8a", "ovs_interfaceid": "7ca41605-8ab9-4d01-835b-70d47e78fce9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1272.109855] env[68217]: DEBUG oslo_concurrency.lockutils [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1272.493891] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fb3e607-25f0-4736-a873-06e7b0a85d58 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.501736] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d765ccfa-c266-4da2-9dae-94e316f2808e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.531487] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c0f0de-300b-4953-84b1-c039aa053659 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.537956] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8ea788-6d22-464b-9e80-ef310f9a5f8e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1272.550728] env[68217]: DEBUG nova.compute.provider_tree [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1273.053926] env[68217]: DEBUG nova.scheduler.client.report [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 
400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1273.558992] env[68217]: DEBUG oslo_concurrency.lockutils [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.168s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1273.595678] env[68217]: INFO nova.network.neutron [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Updating port 15c3165f-90ba-4321-8d1f-aea389ccc77a with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 1273.624312] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0445c7-731e-4236-8ae8-af48a3e43ec5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1273.642809] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating instance 'd28bcf16-b081-4dc8-a975-2acaed222e15' progress to 0 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1274.149112] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1274.149414] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e629c695-8b1c-4a69-8796-458ea58770be {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.157040] env[68217]: DEBUG oslo_vmware.api [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1274.157040] env[68217]: value = "task-2962268" [ 1274.157040] env[68217]: _type = "Task" [ 1274.157040] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1274.165089] env[68217]: DEBUG oslo_vmware.api [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962268, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1274.669068] env[68217]: DEBUG oslo_vmware.api [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962268, 'name': PowerOffVM_Task, 'duration_secs': 0.190503} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1274.669421] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1274.669514] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating instance 'd28bcf16-b081-4dc8-a975-2acaed222e15' progress to 17 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1274.962529] env[68217]: DEBUG nova.compute.manager [req-5dcebdac-e969-469c-b617-171e2d54eeb6 req-fd73d769-3794-4d78-a325-e097a3d27f3d service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Received event network-vif-plugged-15c3165f-90ba-4321-8d1f-aea389ccc77a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1274.962752] env[68217]: DEBUG oslo_concurrency.lockutils [req-5dcebdac-e969-469c-b617-171e2d54eeb6 req-fd73d769-3794-4d78-a325-e097a3d27f3d service nova] Acquiring lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1274.963086] env[68217]: DEBUG oslo_concurrency.lockutils [req-5dcebdac-e969-469c-b617-171e2d54eeb6 req-fd73d769-3794-4d78-a325-e097a3d27f3d service nova] Lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1274.963264] env[68217]: DEBUG oslo_concurrency.lockutils [req-5dcebdac-e969-469c-b617-171e2d54eeb6 req-fd73d769-3794-4d78-a325-e097a3d27f3d service nova] Lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1274.963434] env[68217]: DEBUG nova.compute.manager [req-5dcebdac-e969-469c-b617-171e2d54eeb6 req-fd73d769-3794-4d78-a325-e097a3d27f3d service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] No waiting events found dispatching network-vif-plugged-15c3165f-90ba-4321-8d1f-aea389ccc77a {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1274.963598] env[68217]: WARNING nova.compute.manager [req-5dcebdac-e969-469c-b617-171e2d54eeb6 req-fd73d769-3794-4d78-a325-e097a3d27f3d service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Received unexpected event network-vif-plugged-15c3165f-90ba-4321-8d1f-aea389ccc77a for instance with vm_state shelved_offloaded and task_state spawning. 
[ 1275.046965] env[68217]: DEBUG oslo_concurrency.lockutils [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1275.047146] env[68217]: DEBUG oslo_concurrency.lockutils [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1275.047282] env[68217]: DEBUG nova.network.neutron [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1275.175334] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1275.175590] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1275.175744] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1275.175921] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1275.176076] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1275.176223] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 
{{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1275.176422] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1275.176588] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1275.176773] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1275.176960] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1275.177118] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1275.182270] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f65da1bb-a8d1-4416-92d4-5b102afc3ea4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.197965] env[68217]: DEBUG oslo_vmware.api [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1275.197965] env[68217]: value = "task-2962269" [ 1275.197965] env[68217]: _type = "Task" [ 1275.197965] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1275.206006] env[68217]: DEBUG oslo_vmware.api [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962269, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1275.708126] env[68217]: DEBUG oslo_vmware.api [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962269, 'name': ReconfigVM_Task, 'duration_secs': 0.15235} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1275.708454] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating instance 'd28bcf16-b081-4dc8-a975-2acaed222e15' progress to 33 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1275.744290] env[68217]: DEBUG nova.network.neutron [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Updating instance_info_cache with network_info: [{"id": "15c3165f-90ba-4321-8d1f-aea389ccc77a", "address": "fa:16:3e:ea:06:46", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15c3165f-90", "ovs_interfaceid": "15c3165f-90ba-4321-8d1f-aea389ccc77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1276.219024] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1276.219024] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1276.219024] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 
tempest-ServerActionsTestJSON-434688944-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1276.219024] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1276.219517] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1276.219831] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1276.220211] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1276.221155] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1276.221155] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1276.221155] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1276.221155] env[68217]: DEBUG nova.virt.hardware [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1276.226608] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Reconfiguring VM instance instance-00000064 to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1276.226933] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-da7d9835-6639-4b88-addc-4224569c79a9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1276.245869] env[68217]: DEBUG oslo_vmware.api [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1276.245869] env[68217]: value = "task-2962270" [ 1276.245869] env[68217]: _type = "Task" [ 1276.245869] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.246387] env[68217]: DEBUG oslo_concurrency.lockutils [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1276.256298] env[68217]: DEBUG oslo_vmware.api [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962270, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.276000] env[68217]: DEBUG nova.virt.hardware [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='1a780220177bad73b130510ef69cf8b2',container_format='bare',created_at=2025-03-12T08:26:24Z,direct_url=,disk_format='vmdk',id=1a1f3e1d-f17f-452a-bdff-e6a0645b108c,min_disk=1,min_ram=0,name='tempest-ServerActionsTestOtherB-server-762302015-shelved',owner='90ad2b0a8a0743ca80a0685bf56e0446',properties=ImageMetaProps,protected=,size=31669760,status='active',tags=,updated_at=2025-03-12T08:26:39Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1276.276262] env[68217]: DEBUG nova.virt.hardware [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1276.276449] env[68217]: DEBUG nova.virt.hardware [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1276.276692] env[68217]: DEBUG nova.virt.hardware [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1276.276862] env[68217]: DEBUG nova.virt.hardware [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1276.277059] 
env[68217]: DEBUG nova.virt.hardware [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1276.277313] env[68217]: DEBUG nova.virt.hardware [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1276.277493] env[68217]: DEBUG nova.virt.hardware [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1276.277694] env[68217]: DEBUG nova.virt.hardware [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1276.277895] env[68217]: DEBUG nova.virt.hardware [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1276.278128] env[68217]: DEBUG nova.virt.hardware [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1276.279732] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d54aa92-fd8f-4220-bc0d-33aa166e6481 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.286417] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68189c8d-7f3d-4f2c-ac66-bd3e24419d27 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.299802] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ea:06:46', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15c3165f-90ba-4321-8d1f-aea389ccc77a', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1276.306962] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 
tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1276.307212] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1276.307416] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb92d31b-7a7c-46d4-b17d-3ef524639a00 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.325949] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1276.325949] env[68217]: value = "task-2962271" [ 1276.325949] env[68217]: _type = "Task" [ 1276.325949] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.333440] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962271, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.756066] env[68217]: DEBUG oslo_vmware.api [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962270, 'name': ReconfigVM_Task, 'duration_secs': 0.170204} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.756418] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Reconfigured VM instance instance-00000064 to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1276.757116] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ace176f-0757-4391-98a0-5ad039233d37 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.778049] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] d28bcf16-b081-4dc8-a975-2acaed222e15/d28bcf16-b081-4dc8-a975-2acaed222e15.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1276.778281] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b0a1e870-34f9-41b1-9db3-87e64844b02b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.795680] env[68217]: DEBUG oslo_vmware.api [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1276.795680] env[68217]: value = "task-2962272" [ 1276.795680] env[68217]: _type = "Task" [ 1276.795680] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.804370] env[68217]: DEBUG oslo_vmware.api [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962272, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.834692] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962271, 'name': CreateVM_Task, 'duration_secs': 0.292749} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1276.834882] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1276.835535] env[68217]: DEBUG oslo_concurrency.lockutils [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1a1f3e1d-f17f-452a-bdff-e6a0645b108c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.835707] env[68217]: DEBUG oslo_concurrency.lockutils [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1a1f3e1d-f17f-452a-bdff-e6a0645b108c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1276.836124] env[68217]: DEBUG oslo_concurrency.lockutils [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/1a1f3e1d-f17f-452a-bdff-e6a0645b108c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1276.836390] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b2381607-abb4-4ce0-a42a-3e73c2f0bfcb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1276.840979] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1276.840979] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52353dc0-0e1d-e47e-95c7-9b2d786fa497" [ 1276.840979] env[68217]: _type = "Task" [ 1276.840979] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1276.848608] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52353dc0-0e1d-e47e-95c7-9b2d786fa497, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1276.988241] env[68217]: DEBUG nova.compute.manager [req-d0eed114-32c4-4dce-be78-28de35940ddb req-7ef96694-4823-433f-bd4c-b520e2e29b09 service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Received event network-changed-15c3165f-90ba-4321-8d1f-aea389ccc77a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1276.988241] env[68217]: DEBUG nova.compute.manager [req-d0eed114-32c4-4dce-be78-28de35940ddb req-7ef96694-4823-433f-bd4c-b520e2e29b09 service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Refreshing instance network info cache due to event network-changed-15c3165f-90ba-4321-8d1f-aea389ccc77a. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1276.988241] env[68217]: DEBUG oslo_concurrency.lockutils [req-d0eed114-32c4-4dce-be78-28de35940ddb req-7ef96694-4823-433f-bd4c-b520e2e29b09 service nova] Acquiring lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1276.988241] env[68217]: DEBUG oslo_concurrency.lockutils [req-d0eed114-32c4-4dce-be78-28de35940ddb req-7ef96694-4823-433f-bd4c-b520e2e29b09 service nova] Acquired lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1276.988241] env[68217]: DEBUG nova.network.neutron [req-d0eed114-32c4-4dce-be78-28de35940ddb req-7ef96694-4823-433f-bd4c-b520e2e29b09 service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Refreshing network info cache for port 15c3165f-90ba-4321-8d1f-aea389ccc77a {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1277.304935] env[68217]: DEBUG oslo_vmware.api [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962272, 'name': ReconfigVM_Task, 'duration_secs': 0.241497} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1277.305233] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Reconfigured VM instance instance-00000064 to attach disk [datastore1] d28bcf16-b081-4dc8-a975-2acaed222e15/d28bcf16-b081-4dc8-a975-2acaed222e15.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1277.305484] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating instance 'd28bcf16-b081-4dc8-a975-2acaed222e15' progress to 50 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1277.351465] env[68217]: DEBUG oslo_concurrency.lockutils [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1a1f3e1d-f17f-452a-bdff-e6a0645b108c" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1277.351703] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Processing image 1a1f3e1d-f17f-452a-bdff-e6a0645b108c {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1277.351963] env[68217]: DEBUG oslo_concurrency.lockutils [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/1a1f3e1d-f17f-452a-bdff-e6a0645b108c/1a1f3e1d-f17f-452a-bdff-e6a0645b108c.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1277.352130] env[68217]: DEBUG oslo_concurrency.lockutils [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquired lock "[datastore2] devstack-image-cache_base/1a1f3e1d-f17f-452a-bdff-e6a0645b108c/1a1f3e1d-f17f-452a-bdff-e6a0645b108c.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1277.352313] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1277.352551] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-be3cbeb7-371e-41aa-9dfc-747f72530746 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.360946] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 
tempest-ServerActionsTestOtherB-1476378927-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1277.360946] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1277.361460] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f20d3e2-656b-466e-bea3-c243bf32ccb3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.366298] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1277.366298] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c6b001-ffdc-8f88-c3d1-b926ab794173" [ 1277.366298] env[68217]: _type = "Task" [ 1277.366298] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1277.373296] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c6b001-ffdc-8f88-c3d1-b926ab794173, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1277.686070] env[68217]: DEBUG nova.network.neutron [req-d0eed114-32c4-4dce-be78-28de35940ddb req-7ef96694-4823-433f-bd4c-b520e2e29b09 service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Updated VIF entry in instance network info cache for port 15c3165f-90ba-4321-8d1f-aea389ccc77a. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1277.686434] env[68217]: DEBUG nova.network.neutron [req-d0eed114-32c4-4dce-be78-28de35940ddb req-7ef96694-4823-433f-bd4c-b520e2e29b09 service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Updating instance_info_cache with network_info: [{"id": "15c3165f-90ba-4321-8d1f-aea389ccc77a", "address": "fa:16:3e:ea:06:46", "network": {"id": "8fa1c488-b671-4c19-b6f7-4e8d8fe09864", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-742766236-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90ad2b0a8a0743ca80a0685bf56e0446", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f6ca3b2e-69a5-4cea-96a7-eaad5ec5fd9b", "external-id": "nsx-vlan-transportzone-989", "segmentation_id": 989, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15c3165f-90", "ovs_interfaceid": "15c3165f-90ba-4321-8d1f-aea389ccc77a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1277.812657] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f0f43b1-f27f-44be-91d1-a93415655c52 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.832076] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf798c21-1d16-4e10-a420-8fc37a3c54a8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.848813] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating instance 'd28bcf16-b081-4dc8-a975-2acaed222e15' progress to 67 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1277.874564] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Preparing fetch location {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1277.874803] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Fetch image to [datastore2] OSTACK_IMG_c16feb67-0188-496e-8865-b9d39a837239/OSTACK_IMG_c16feb67-0188-496e-8865-b9d39a837239.vmdk {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1277.874987] env[68217]: DEBUG nova.virt.vmwareapi.vmops 
[None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Downloading stream optimized image 1a1f3e1d-f17f-452a-bdff-e6a0645b108c to [datastore2] OSTACK_IMG_c16feb67-0188-496e-8865-b9d39a837239/OSTACK_IMG_c16feb67-0188-496e-8865-b9d39a837239.vmdk on the data store datastore2 as vApp {{(pid=68217) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 1277.875176] env[68217]: DEBUG nova.virt.vmwareapi.images [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Downloading image file data 1a1f3e1d-f17f-452a-bdff-e6a0645b108c to the ESX as VM named 'OSTACK_IMG_c16feb67-0188-496e-8865-b9d39a837239' {{(pid=68217) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 1277.942306] env[68217]: DEBUG oslo_vmware.rw_handles [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 1277.942306] env[68217]: value = "resgroup-9" [ 1277.942306] env[68217]: _type = "ResourcePool" [ 1277.942306] env[68217]: }. {{(pid=68217) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 1277.942621] env[68217]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-08ab0fce-1bbb-42f3-bcb2-09313074b414 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1277.963666] env[68217]: DEBUG oslo_vmware.rw_handles [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lease: (returnval){ [ 1277.963666] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524d7e71-a2ad-c857-2705-0b312b659117" [ 1277.963666] env[68217]: _type = "HttpNfcLease" [ 1277.963666] env[68217]: } obtained for vApp import into resource pool (val){ [ 1277.963666] env[68217]: value = "resgroup-9" [ 1277.963666] env[68217]: _type = "ResourcePool" [ 1277.963666] env[68217]: }. {{(pid=68217) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 1277.963966] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the lease: (returnval){ [ 1277.963966] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524d7e71-a2ad-c857-2705-0b312b659117" [ 1277.963966] env[68217]: _type = "HttpNfcLease" [ 1277.963966] env[68217]: } to be ready. {{(pid=68217) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 1277.969980] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1277.969980] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524d7e71-a2ad-c857-2705-0b312b659117" [ 1277.969980] env[68217]: _type = "HttpNfcLease" [ 1277.969980] env[68217]: } is initializing. 
{{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 1278.189587] env[68217]: DEBUG oslo_concurrency.lockutils [req-d0eed114-32c4-4dce-be78-28de35940ddb req-7ef96694-4823-433f-bd4c-b520e2e29b09 service nova] Releasing lock "refresh_cache-b5fdce0e-465a-4cf0-9a15-313bba7a11e9" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1278.387464] env[68217]: DEBUG nova.network.neutron [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Port 7ca41605-8ab9-4d01-835b-70d47e78fce9 binding to destination host cpu-1 is already ACTIVE {{(pid=68217) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1278.472219] env[68217]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 1278.472219] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524d7e71-a2ad-c857-2705-0b312b659117" [ 1278.472219] env[68217]: _type = "HttpNfcLease" [ 1278.472219] env[68217]: } is ready. {{(pid=68217) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 1278.472529] env[68217]: DEBUG oslo_vmware.rw_handles [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 1278.472529] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524d7e71-a2ad-c857-2705-0b312b659117" [ 1278.472529] env[68217]: _type = "HttpNfcLease" [ 1278.472529] env[68217]: }. {{(pid=68217) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 1278.473250] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb4c12a8-cfc3-4639-b6fe-8f887d00a308 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1278.480200] env[68217]: DEBUG oslo_vmware.rw_handles [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52953d76-4a3b-5c6a-19c0-9f07d05a3373/disk-0.vmdk from lease info. {{(pid=68217) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 1278.480377] env[68217]: DEBUG oslo_vmware.rw_handles [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Creating HTTP connection to write to file with size = 31669760 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52953d76-4a3b-5c6a-19c0-9f07d05a3373/disk-0.vmdk. 
{{(pid=68217) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1278.543379] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-00111ea5-77a4-4417-be06-a0e4b3a24cd9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.409379] env[68217]: DEBUG oslo_concurrency.lockutils [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "d28bcf16-b081-4dc8-a975-2acaed222e15-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1279.409789] env[68217]: DEBUG oslo_concurrency.lockutils [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "d28bcf16-b081-4dc8-a975-2acaed222e15-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1279.409789] env[68217]: DEBUG oslo_concurrency.lockutils [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "d28bcf16-b081-4dc8-a975-2acaed222e15-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1279.639170] env[68217]: DEBUG oslo_vmware.rw_handles [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Completed reading data from the image iterator. {{(pid=68217) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1279.639402] env[68217]: DEBUG oslo_vmware.rw_handles [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52953d76-4a3b-5c6a-19c0-9f07d05a3373/disk-0.vmdk. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 1279.640341] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de6cd943-0ecb-4ba8-b669-9096830e142d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.647194] env[68217]: DEBUG oslo_vmware.rw_handles [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52953d76-4a3b-5c6a-19c0-9f07d05a3373/disk-0.vmdk is in state: ready. 
{{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 1279.647370] env[68217]: DEBUG oslo_vmware.rw_handles [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52953d76-4a3b-5c6a-19c0-9f07d05a3373/disk-0.vmdk. {{(pid=68217) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 1279.647592] env[68217]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-3cc57d5f-8c43-4219-99fc-3606e148d58e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.826440] env[68217]: DEBUG oslo_vmware.rw_handles [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/52953d76-4a3b-5c6a-19c0-9f07d05a3373/disk-0.vmdk. {{(pid=68217) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 1279.826718] env[68217]: INFO nova.virt.vmwareapi.images [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Downloaded image file data 1a1f3e1d-f17f-452a-bdff-e6a0645b108c [ 1279.827578] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905a8283-46f9-4098-8f1a-f49775c7f8d9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.842400] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b9f2d2a1-657f-4f63-a51d-ec54ab8a0bb1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.871691] env[68217]: INFO nova.virt.vmwareapi.images [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] The imported VM was unregistered [ 1279.873981] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Caching image {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1279.874251] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Creating directory with path [datastore2] devstack-image-cache_base/1a1f3e1d-f17f-452a-bdff-e6a0645b108c {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1279.874504] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-332e16fe-58f6-4bdc-9ce6-b96ddf0e3337 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.893378] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 
tempest-ServerActionsTestOtherB-1476378927-project-member] Created directory with path [datastore2] devstack-image-cache_base/1a1f3e1d-f17f-452a-bdff-e6a0645b108c {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1279.893564] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Moving virtual disk from [datastore2] OSTACK_IMG_c16feb67-0188-496e-8865-b9d39a837239/OSTACK_IMG_c16feb67-0188-496e-8865-b9d39a837239.vmdk to [datastore2] devstack-image-cache_base/1a1f3e1d-f17f-452a-bdff-e6a0645b108c/1a1f3e1d-f17f-452a-bdff-e6a0645b108c.vmdk. {{(pid=68217) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 1279.893790] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-2a492f39-6747-4783-a9a5-7f68b9efdfba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.899685] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1279.899685] env[68217]: value = "task-2962275" [ 1279.899685] env[68217]: _type = "Task" [ 1279.899685] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1279.907033] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962275, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.411557] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962275, 'name': MoveVirtualDisk_Task} progress is 24%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1280.450632] env[68217]: DEBUG oslo_concurrency.lockutils [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1280.450867] env[68217]: DEBUG oslo_concurrency.lockutils [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1280.451067] env[68217]: DEBUG nova.network.neutron [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1280.912996] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962275, 'name': MoveVirtualDisk_Task} progress is 46%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.415951] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962275, 'name': MoveVirtualDisk_Task} progress is 69%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.440737] env[68217]: DEBUG nova.network.neutron [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating instance_info_cache with network_info: [{"id": "7ca41605-8ab9-4d01-835b-70d47e78fce9", "address": "fa:16:3e:26:5b:d2", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ca41605-8a", "ovs_interfaceid": "7ca41605-8ab9-4d01-835b-70d47e78fce9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1281.913285] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962275, 'name': MoveVirtualDisk_Task} progress is 91%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1281.944138] env[68217]: DEBUG oslo_concurrency.lockutils [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1282.412566] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962275, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1282.469915] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e030217-6d06-4f5c-b52f-d46e5b6a60f6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.488120] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae67e8dd-4dd7-436d-ab83-65a454fd37dd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.494764] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating instance 'd28bcf16-b081-4dc8-a975-2acaed222e15' progress to 83 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1282.913673] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962275, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.886739} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1282.913944] env[68217]: INFO nova.virt.vmwareapi.ds_util [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Moved virtual disk from [datastore2] OSTACK_IMG_c16feb67-0188-496e-8865-b9d39a837239/OSTACK_IMG_c16feb67-0188-496e-8865-b9d39a837239.vmdk to [datastore2] devstack-image-cache_base/1a1f3e1d-f17f-452a-bdff-e6a0645b108c/1a1f3e1d-f17f-452a-bdff-e6a0645b108c.vmdk. [ 1282.914148] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Cleaning up location [datastore2] OSTACK_IMG_c16feb67-0188-496e-8865-b9d39a837239 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 1282.914312] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Deleting the datastore file [datastore2] OSTACK_IMG_c16feb67-0188-496e-8865-b9d39a837239 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1282.914568] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9b20bca0-94d9-458f-9c90-b9394fcbba83 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1282.920595] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1282.920595] env[68217]: value = "task-2962276" [ 1282.920595] env[68217]: _type = "Task" [ 1282.920595] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1282.927668] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962276, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.000748] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1283.001076] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a97b8ad8-e97f-4385-9f42-edf594632057 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.008230] env[68217]: DEBUG oslo_vmware.api [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1283.008230] env[68217]: value = "task-2962277" [ 1283.008230] env[68217]: _type = "Task" [ 1283.008230] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.015930] env[68217]: DEBUG oslo_vmware.api [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962277, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.430942] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962276, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.210909} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1283.431230] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1283.431393] env[68217]: DEBUG oslo_concurrency.lockutils [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Releasing lock "[datastore2] devstack-image-cache_base/1a1f3e1d-f17f-452a-bdff-e6a0645b108c/1a1f3e1d-f17f-452a-bdff-e6a0645b108c.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1283.431634] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/1a1f3e1d-f17f-452a-bdff-e6a0645b108c/1a1f3e1d-f17f-452a-bdff-e6a0645b108c.vmdk to [datastore2] b5fdce0e-465a-4cf0-9a15-313bba7a11e9/b5fdce0e-465a-4cf0-9a15-313bba7a11e9.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1283.431880] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-61d0e676-5b76-48c9-9177-76bc7ddab297 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1283.439749] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1283.439749] env[68217]: value = "task-2962278" [ 1283.439749] env[68217]: _type = "Task" [ 1283.439749] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1283.446603] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962278, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.516768] env[68217]: DEBUG oslo_vmware.api [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962277, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1283.954495] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962278, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.023776] env[68217]: DEBUG oslo_vmware.api [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962277, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.453808] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962278, 'name': CopyVirtualDisk_Task} progress is 43%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.521624] env[68217]: DEBUG oslo_vmware.api [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962277, 'name': PowerOnVM_Task, 'duration_secs': 1.02592} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.521997] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1284.521997] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-402433c6-102c-419b-a990-004bcaf7e0a9 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating instance 'd28bcf16-b081-4dc8-a975-2acaed222e15' progress to 100 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1284.954467] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962278, 'name': CopyVirtualDisk_Task} progress is 66%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.354607] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "e11b2312-4cc2-4b49-bd26-22fd5629669d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.354810] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "e11b2312-4cc2-4b49-bd26-22fd5629669d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1285.355021] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "e11b2312-4cc2-4b49-bd26-22fd5629669d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1285.355213] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "e11b2312-4cc2-4b49-bd26-22fd5629669d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1285.355390] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "e11b2312-4cc2-4b49-bd26-22fd5629669d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1285.357658] env[68217]: INFO nova.compute.manager [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Terminating instance [ 1285.454629] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962278, 'name': CopyVirtualDisk_Task} progress is 88%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.861394] env[68217]: DEBUG nova.compute.manager [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1285.861822] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1285.861945] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fcf47607-1cb4-4f49-a011-d8ade7041097 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.869109] env[68217]: DEBUG oslo_vmware.api [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1285.869109] env[68217]: value = "task-2962279" [ 1285.869109] env[68217]: _type = "Task" [ 1285.869109] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1285.877627] env[68217]: DEBUG oslo_vmware.api [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962279, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.953923] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962278, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.237059} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1285.954190] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/1a1f3e1d-f17f-452a-bdff-e6a0645b108c/1a1f3e1d-f17f-452a-bdff-e6a0645b108c.vmdk to [datastore2] b5fdce0e-465a-4cf0-9a15-313bba7a11e9/b5fdce0e-465a-4cf0-9a15-313bba7a11e9.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1285.954915] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-785e00ce-91fe-4050-84ba-fb03e5bcbb66 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.977634] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Reconfiguring VM instance instance-0000007a to attach disk [datastore2] b5fdce0e-465a-4cf0-9a15-313bba7a11e9/b5fdce0e-465a-4cf0-9a15-313bba7a11e9.vmdk or device None with type streamOptimized {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1285.977943] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ac78c45d-81b2-4235-9bae-5cefa23cd4fa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1285.999242] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1285.999242] env[68217]: value = "task-2962280" [ 1285.999242] env[68217]: _type = "Task" [ 1285.999242] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.007250] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962280, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.378064] env[68217]: DEBUG oslo_vmware.api [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962279, 'name': PowerOffVM_Task, 'duration_secs': 0.237334} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.378358] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1286.378587] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Volume detach. Driver type: vmdk {{(pid=68217) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1286.378827] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594420', 'volume_id': '5c535a76-9a0d-422d-88d0-9feb0c1b7b55', 'name': 'volume-5c535a76-9a0d-422d-88d0-9feb0c1b7b55', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'e11b2312-4cc2-4b49-bd26-22fd5629669d', 'attached_at': '2025-03-12T08:26:26.000000', 'detached_at': '', 'volume_id': '5c535a76-9a0d-422d-88d0-9feb0c1b7b55', 'serial': '5c535a76-9a0d-422d-88d0-9feb0c1b7b55'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1286.379578] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-249de91f-f11b-4a88-8567-f2c39b97bc9d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.396954] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e09beb16-8eab-4f98-b707-e27ba9ad26e6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.403559] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-063e2d7a-01c5-487a-a694-96183a93f4ea {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.420628] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a221eee8-4a1c-43c8-ab03-ab9123f7835d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.436351] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] The volume has not been displaced from its original location: [datastore1] volume-5c535a76-9a0d-422d-88d0-9feb0c1b7b55/volume-5c535a76-9a0d-422d-88d0-9feb0c1b7b55.vmdk. No consolidation needed. 
{{(pid=68217) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1286.441467] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Reconfiguring VM instance instance-0000007b to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1286.441731] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-10195aa1-61a6-4468-b0c1-4b3558aaff9d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.458932] env[68217]: DEBUG oslo_vmware.api [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1286.458932] env[68217]: value = "task-2962281" [ 1286.458932] env[68217]: _type = "Task" [ 1286.458932] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.466368] env[68217]: DEBUG oslo_vmware.api [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962281, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.507594] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962280, 'name': ReconfigVM_Task, 'duration_secs': 0.275161} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.507861] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Reconfigured VM instance instance-0000007a to attach disk [datastore2] b5fdce0e-465a-4cf0-9a15-313bba7a11e9/b5fdce0e-465a-4cf0-9a15-313bba7a11e9.vmdk or device None with type streamOptimized {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1286.508504] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-e9f3c0e4-b3dc-4058-8f7a-4ca180438132 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.514256] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1286.514256] env[68217]: value = "task-2962282" [ 1286.514256] env[68217]: _type = "Task" [ 1286.514256] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.521515] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962282, 'name': Rename_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.834054] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "d28bcf16-b081-4dc8-a975-2acaed222e15" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1286.834338] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "d28bcf16-b081-4dc8-a975-2acaed222e15" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1286.834529] env[68217]: DEBUG nova.compute.manager [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Going to confirm migration 9 {{(pid=68217) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:5250}} [ 1286.968644] env[68217]: DEBUG oslo_vmware.api [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962281, 'name': ReconfigVM_Task, 'duration_secs': 0.154956} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.969060] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Reconfigured VM instance instance-0000007b to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1286.973430] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f679dec5-2ae7-4e58-9914-a1cc49bed6c2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1286.987368] env[68217]: DEBUG oslo_vmware.api [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1286.987368] env[68217]: value = "task-2962283" [ 1286.987368] env[68217]: _type = "Task" [ 1286.987368] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1286.995108] env[68217]: DEBUG oslo_vmware.api [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962283, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.023539] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962282, 'name': Rename_Task, 'duration_secs': 0.367661} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.023788] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1287.024047] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ca2d383a-7ece-40e9-b5c8-9ef1d59b4e82 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.030375] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1287.030375] env[68217]: value = "task-2962284" [ 1287.030375] env[68217]: _type = "Task" [ 1287.030375] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.037500] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962284, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.349387] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1287.372387] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1287.372576] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1287.372747] env[68217]: DEBUG nova.network.neutron [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1287.372931] env[68217]: DEBUG nova.objects.instance [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lazy-loading 'info_cache' on Instance uuid d28bcf16-b081-4dc8-a975-2acaed222e15 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1287.497768] env[68217]: DEBUG oslo_vmware.api [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': 
task-2962283, 'name': ReconfigVM_Task, 'duration_secs': 0.209897} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.498081] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-594420', 'volume_id': '5c535a76-9a0d-422d-88d0-9feb0c1b7b55', 'name': 'volume-5c535a76-9a0d-422d-88d0-9feb0c1b7b55', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False, 'enforce_multipath': False}, 'status': 'attaching', 'instance': 'e11b2312-4cc2-4b49-bd26-22fd5629669d', 'attached_at': '2025-03-12T08:26:26.000000', 'detached_at': '', 'volume_id': '5c535a76-9a0d-422d-88d0-9feb0c1b7b55', 'serial': '5c535a76-9a0d-422d-88d0-9feb0c1b7b55'} {{(pid=68217) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1287.498351] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1287.499137] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce375a24-b8d9-4a70-be85-61906fc5786b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.505461] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1287.505693] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c342fcd1-ad42-4ec9-92f3-0708cf4dc9c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.538461] env[68217]: DEBUG oslo_vmware.api [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962284, 'name': PowerOnVM_Task, 'duration_secs': 0.433945} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1287.538696] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1287.563854] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1287.564129] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1287.564395] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleting the datastore file [datastore1] e11b2312-4cc2-4b49-bd26-22fd5629669d {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1287.564699] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-82d72d56-3f92-4187-b451-200e73753d5a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.570738] env[68217]: DEBUG oslo_vmware.api [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1287.570738] env[68217]: value = "task-2962286" [ 1287.570738] env[68217]: _type = "Task" [ 1287.570738] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1287.579384] env[68217]: DEBUG oslo_vmware.api [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962286, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1287.642742] env[68217]: DEBUG nova.compute.manager [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1287.644086] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32dd34fc-4e74-4085-9fa6-7260eb651cef {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.852808] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1287.853071] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1287.853245] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1287.853407] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68217) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1287.854271] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e00b60a0-7ee9-44cd-b1f0-a42fb6d2ff90 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.862242] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070590dc-a88c-44f0-8fff-50c9cae73765 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.878261] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59588e3f-f334-4090-969a-e379e1c4fda0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.884279] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1c35993-576e-4602-8fee-8d2ef345fcec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1287.913612] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180196MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=68217) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 
1287.913760] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1287.913965] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1288.080608] env[68217]: DEBUG oslo_vmware.api [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962286, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.077396} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.081025] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1288.081025] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1288.081190] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1288.081363] env[68217]: INFO nova.compute.manager [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Took 2.22 seconds to destroy the instance on the hypervisor. [ 1288.081591] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1288.081771] env[68217]: DEBUG nova.compute.manager [-] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1288.081866] env[68217]: DEBUG nova.network.neutron [-] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1288.159845] env[68217]: DEBUG oslo_concurrency.lockutils [None req-81df330c-1b8e-41c4-83da-189dfcb47d62 tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 19.429s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1288.717035] env[68217]: DEBUG nova.compute.manager [req-956d6522-96c3-4497-8e0c-d16d7b56a235 req-09166b95-c782-4186-be37-e2b7010bf15f service nova] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Received event network-vif-deleted-0680c857-993e-4996-8477-cfaa0a5727ad {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1288.717103] env[68217]: INFO nova.compute.manager [req-956d6522-96c3-4497-8e0c-d16d7b56a235 req-09166b95-c782-4186-be37-e2b7010bf15f service nova] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Neutron deleted interface 0680c857-993e-4996-8477-cfaa0a5727ad; detaching it from the instance and deleting it from the info cache [ 1288.717290] env[68217]: DEBUG nova.network.neutron [req-956d6522-96c3-4497-8e0c-d16d7b56a235 req-09166b95-c782-4186-be37-e2b7010bf15f service nova] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1288.774892] env[68217]: DEBUG nova.network.neutron [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating instance_info_cache with network_info: [{"id": "7ca41605-8ab9-4d01-835b-70d47e78fce9", "address": "fa:16:3e:26:5b:d2", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ca41605-8a", "ovs_interfaceid": "7ca41605-8ab9-4d01-835b-70d47e78fce9", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1288.921283] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Applying migration context for instance d28bcf16-b081-4dc8-a975-2acaed222e15 as it has an incoming, in-progress migration eba49941-ecfd-44d1-bfe9-04f171b1fe1c. Migration status is confirming {{(pid=68217) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1016}} [ 1288.922283] env[68217]: INFO nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating resource usage from migration eba49941-ecfd-44d1-bfe9-04f171b1fe1c [ 1288.943022] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 7056fb29-2a2f-4275-a411-4d5f3fcb421f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1288.943022] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance a86015ea-fa6b-4cf8-9d79-273ffa02ec23 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1288.943022] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 01c32252-f6e0-4cb0-966e-622872d49199 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1288.943022] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance e11b2312-4cc2-4b49-bd26-22fd5629669d actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1288.943281] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Migration eba49941-ecfd-44d1-bfe9-04f171b1fe1c is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1288.943281] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance d28bcf16-b081-4dc8-a975-2acaed222e15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1288.943365] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance b5fdce0e-465a-4cf0-9a15-313bba7a11e9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1288.943492] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1288.943624] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1984MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1289.025970] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f614bbd-ebac-462a-8056-ca442f7ec644 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.033458] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b62b58f-4ea7-419c-a7f5-794ccddaf347 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.062920] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3af06a40-f600-4580-991a-cdd7248cf9e6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.069680] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a41b2a2-0ed2-4a26-a066-ef24ee079ec8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.081991] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1289.153649] env[68217]: DEBUG nova.network.neutron [-] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1289.221634] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-24434f33-d8ed-4241-841d-bd19cce70ca6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.231463] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab269e2-5ccb-42b0-a93a-447ab64cf471 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.260726] env[68217]: DEBUG nova.compute.manager [req-956d6522-96c3-4497-8e0c-d16d7b56a235 req-09166b95-c782-4186-be37-e2b7010bf15f service nova] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Detach interface failed, port_id=0680c857-993e-4996-8477-cfaa0a5727ad, reason: Instance e11b2312-4cc2-4b49-bd26-22fd5629669d could not be found. 
{{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1289.277597] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "refresh_cache-d28bcf16-b081-4dc8-a975-2acaed222e15" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1289.277853] env[68217]: DEBUG nova.objects.instance [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lazy-loading 'migration_context' on Instance uuid d28bcf16-b081-4dc8-a975-2acaed222e15 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1289.396101] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1289.396367] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1289.396573] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1289.396753] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1289.397162] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1289.398933] env[68217]: INFO nova.compute.manager [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Terminating instance [ 1289.584588] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] 
Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1289.655789] env[68217]: INFO nova.compute.manager [-] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Took 1.57 seconds to deallocate network for instance. [ 1289.781342] env[68217]: DEBUG nova.objects.base [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Object Instance lazy-loaded attributes: info_cache,migration_context {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1289.782448] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f4c9e9e-7096-4b70-a9e2-bb45c3ee9609 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.802917] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b53086d5-33ac-4ab2-bfc9-8890652af0f3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.807249] env[68217]: DEBUG oslo_vmware.api [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1289.807249] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c8b35e-225e-c281-24f7-f6282e6c2121" [ 1289.807249] env[68217]: _type = "Task" [ 1289.807249] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.814815] env[68217]: DEBUG oslo_vmware.api [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c8b35e-225e-c281-24f7-f6282e6c2121, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.902472] env[68217]: DEBUG nova.compute.manager [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1289.902696] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1289.903596] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-905f12ff-56f1-4aae-beec-75ac3c45051c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.911542] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1289.911770] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ba593a87-aa45-401d-a28f-e6efc0a47fe1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1289.917803] env[68217]: DEBUG oslo_vmware.api [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1289.917803] env[68217]: value = "task-2962287" [ 1289.917803] env[68217]: _type = "Task" [ 1289.917803] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1289.925732] env[68217]: DEBUG oslo_vmware.api [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962287, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.089044] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68217) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1290.089544] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.175s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1290.198490] env[68217]: INFO nova.compute.manager [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Took 0.54 seconds to detach 1 volumes for instance. 
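
[editor's note] The PowerOffVM_Task / DeleteDatastoreFile_Task entries around this point all follow the same oslo.vmware task-polling pattern: the driver invokes a vCenter task, then repeatedly polls it ("Waiting for the task ... to complete", "progress is 0%") until it "completed successfully", at which point the duration_secs is logged. The minimal stand-alone sketch below illustrates that loop only; get_task_info and the state constants are hypothetical stand-ins for the vCenter task API, not the actual oslo.vmware implementation.

    import time

    # Hypothetical task states mirroring vCenter's TaskInfo.state values.
    RUNNING, SUCCESS, ERROR = "running", "success", "error"

    def wait_for_task(get_task_info, task_id, poll_interval=0.5, timeout=60.0):
        """Poll a task until it finishes, mimicking the log's
        'Waiting for the task ... to complete' / 'progress is N%' /
        'completed successfully' sequence.

        get_task_info is a hypothetical callable returning a dict such as
        {'state': 'running', 'progress': 40} for the given task_id.
        """
        start = time.monotonic()
        while True:
            info = get_task_info(task_id)
            if info["state"] == SUCCESS:
                # The real log records duration_secs when the task completes.
                return {"id": task_id, "duration_secs": time.monotonic() - start}
            if info["state"] == ERROR:
                raise RuntimeError(f"Task {task_id} failed: {info.get('error')}")
            if time.monotonic() - start > timeout:
                raise TimeoutError(f"Task {task_id} did not complete in {timeout}s")
            print(f"Task {task_id} progress is {info.get('progress', 0)}%.")
            time.sleep(poll_interval)

In the service itself this loop lives in oslo_vmware.api (wait_for_task / _poll_task), which is what emits the polling DEBUG lines quoted throughout this log. [end editor's note]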
[ 1290.200680] env[68217]: DEBUG nova.compute.manager [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Deleting volume: 5c535a76-9a0d-422d-88d0-9feb0c1b7b55 {{(pid=68217) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3282}} [ 1290.317433] env[68217]: DEBUG oslo_vmware.api [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52c8b35e-225e-c281-24f7-f6282e6c2121, 'name': SearchDatastore_Task, 'duration_secs': 0.007624} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.317693] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1290.317935] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1290.427273] env[68217]: DEBUG oslo_vmware.api [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962287, 'name': PowerOffVM_Task, 'duration_secs': 0.206453} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1290.427520] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1290.427682] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1290.427927] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-db8ed976-c5f1-4a5d-a2bc-d38637f5cbc6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.489769] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1290.489958] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1290.490140] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Deleting the datastore file [datastore2] b5fdce0e-465a-4cf0-9a15-313bba7a11e9 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1290.490398] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c588c427-39bc-4408-9a70-86a1e233542a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.496217] env[68217]: DEBUG oslo_vmware.api [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for the task: (returnval){ [ 1290.496217] env[68217]: value = "task-2962290" [ 1290.496217] env[68217]: _type = "Task" [ 1290.496217] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1290.504046] env[68217]: DEBUG oslo_vmware.api [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962290, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1290.739754] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1290.915480] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f1388f-d286-428a-8d5c-b79458c138e9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.923061] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824cfe56-76e5-4d4e-bd5d-6c81c8bde147 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.962743] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549d15f3-0821-459b-ae74-a38f5d0e5238 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.970178] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59f88351-fb40-40d6-8101-e73d05e398f1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1290.983091] env[68217]: DEBUG nova.compute.provider_tree [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1291.004542] env[68217]: DEBUG oslo_vmware.api [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Task: {'id': task-2962290, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.151768} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1291.004769] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1291.004948] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1291.005139] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1291.005309] env[68217]: INFO nova.compute.manager [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1291.005538] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1291.005735] env[68217]: DEBUG nova.compute.manager [-] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1291.005851] env[68217]: DEBUG nova.network.neutron [-] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1291.443427] env[68217]: DEBUG nova.compute.manager [req-396a7c07-c562-4d2f-9cde-b4def61bd8e2 req-16ee4693-8faa-46d3-8a1a-795913fb7df8 service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Received event network-vif-deleted-15c3165f-90ba-4321-8d1f-aea389ccc77a {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1291.443427] env[68217]: INFO nova.compute.manager [req-396a7c07-c562-4d2f-9cde-b4def61bd8e2 req-16ee4693-8faa-46d3-8a1a-795913fb7df8 service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Neutron deleted interface 15c3165f-90ba-4321-8d1f-aea389ccc77a; detaching it from the instance and deleting it from the info cache [ 1291.443882] env[68217]: DEBUG nova.network.neutron [req-396a7c07-c562-4d2f-9cde-b4def61bd8e2 req-16ee4693-8faa-46d3-8a1a-795913fb7df8 service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1291.486543] env[68217]: DEBUG nova.scheduler.client.report [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1291.927182] env[68217]: DEBUG nova.network.neutron [-] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1291.946509] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1827d3e8-4d3a-4b76-80ef-67966a4b0a65 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.957058] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f373930-2c7c-47a0-a778-9d87d38686a5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1291.984900] env[68217]: DEBUG nova.compute.manager [req-396a7c07-c562-4d2f-9cde-b4def61bd8e2 req-16ee4693-8faa-46d3-8a1a-795913fb7df8 service nova] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Detach interface failed, port_id=15c3165f-90ba-4321-8d1f-aea389ccc77a, reason: Instance b5fdce0e-465a-4cf0-9a15-313bba7a11e9 could not be found. 
{{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1292.429602] env[68217]: INFO nova.compute.manager [-] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Took 1.42 seconds to deallocate network for instance. [ 1292.495275] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.177s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1292.498711] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.758s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1292.499077] env[68217]: DEBUG nova.objects.instance [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lazy-loading 'resources' on Instance uuid e11b2312-4cc2-4b49-bd26-22fd5629669d {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1292.935617] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1293.055169] env[68217]: INFO nova.scheduler.client.report [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Deleted allocation for migration eba49941-ecfd-44d1-bfe9-04f171b1fe1c [ 1293.088668] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.097680] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-743ad22a-1adc-411e-95c3-a865b9b06256 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.105819] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d186c5e-0267-4c64-a3bb-40677ea672e2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.135570] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-357c40e8-c52f-4920-a00d-59f929750d69 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1293.142648] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5094f144-c743-4bbd-b75c-e3a20d9f066a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1293.156478] env[68217]: DEBUG nova.compute.provider_tree [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1293.349676] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.349926] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.350090] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.350249] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1293.560450] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b881ef23-a0e4-40ca-9da2-24907ba7c3d7 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "d28bcf16-b081-4dc8-a975-2acaed222e15" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.726s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.659930] env[68217]: DEBUG nova.scheduler.client.report [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1293.899043] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "d28bcf16-b081-4dc8-a975-2acaed222e15" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1293.899325] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "d28bcf16-b081-4dc8-a975-2acaed222e15" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1293.899947] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "d28bcf16-b081-4dc8-a975-2acaed222e15-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1293.900175] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "d28bcf16-b081-4dc8-a975-2acaed222e15-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1293.900351] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "d28bcf16-b081-4dc8-a975-2acaed222e15-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1293.902308] env[68217]: INFO nova.compute.manager [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Terminating instance [ 1294.164719] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.667s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1294.167862] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.232s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1294.168139] env[68217]: DEBUG nova.objects.instance [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lazy-loading 'resources' on Instance uuid b5fdce0e-465a-4cf0-9a15-313bba7a11e9 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1294.181789] env[68217]: INFO nova.scheduler.client.report [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleted allocations for instance e11b2312-4cc2-4b49-bd26-22fd5629669d [ 1294.344719] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68217) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1294.405298] env[68217]: DEBUG nova.compute.manager [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1294.405522] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1294.406466] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccab24ff-963b-4376-9b1d-cc06304d66b3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.414474] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1294.414706] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0e490ef1-c558-4086-af3b-6cd25895d419 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.421739] env[68217]: DEBUG oslo_vmware.api [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1294.421739] env[68217]: value = "task-2962291" [ 1294.421739] env[68217]: _type = "Task" [ 1294.421739] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1294.431375] env[68217]: DEBUG oslo_vmware.api [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962291, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1294.691862] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c9bc5d7e-24d8-4984-ad53-0c453a11930f tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "e11b2312-4cc2-4b49-bd26-22fd5629669d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.337s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1294.743592] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc635a8-f364-4a18-85ca-ceecdeae11d5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.751580] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0d95cf8-f7ed-4563-b613-69e98d22102b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.785302] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f832054-bbbc-4dca-963b-57f9034bc2da {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.792585] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c34d85-4288-49fd-a7af-109cb5161ab7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.805517] env[68217]: DEBUG nova.compute.provider_tree [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1294.931812] env[68217]: DEBUG oslo_vmware.api [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962291, 'name': PowerOffVM_Task, 'duration_secs': 0.209182} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1294.932030] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1294.932090] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1294.932368] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a5b33737-0701-4f8a-a216-d43159cafb90 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.991550] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1294.991775] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1294.991943] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Deleting the datastore file [datastore1] d28bcf16-b081-4dc8-a975-2acaed222e15 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1294.992280] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c7058b5-b07d-4a16-be58-eaa52bab5bf4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.999334] env[68217]: DEBUG oslo_vmware.api [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1294.999334] env[68217]: value = "task-2962293" [ 1294.999334] env[68217]: _type = "Task" [ 1294.999334] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.006987] env[68217]: DEBUG oslo_vmware.api [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962293, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.229291] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "01c32252-f6e0-4cb0-966e-622872d49199" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1295.229510] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "01c32252-f6e0-4cb0-966e-622872d49199" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1295.229705] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "01c32252-f6e0-4cb0-966e-622872d49199-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1295.229908] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "01c32252-f6e0-4cb0-966e-622872d49199-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1295.230092] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "01c32252-f6e0-4cb0-966e-622872d49199-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1295.232223] env[68217]: INFO nova.compute.manager [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Terminating instance [ 1295.308606] env[68217]: DEBUG nova.scheduler.client.report [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1295.348550] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None 
None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1295.348748] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68217) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1295.510069] env[68217]: DEBUG oslo_vmware.api [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962293, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130177} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1295.510069] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1295.510069] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1295.510280] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1295.510362] env[68217]: INFO nova.compute.manager [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1295.510649] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1295.510852] env[68217]: DEBUG nova.compute.manager [-] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1295.510948] env[68217]: DEBUG nova.network.neutron [-] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1295.736276] env[68217]: DEBUG nova.compute.manager [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1295.736615] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1295.737400] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9204e13e-6620-4ee6-a0f0-5d1c27fed62b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.745229] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1295.745486] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d4b966b7-7ab0-4d9c-9397-60e17d0c8854 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.751923] env[68217]: DEBUG oslo_vmware.api [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1295.751923] env[68217]: value = "task-2962294" [ 1295.751923] env[68217]: _type = "Task" [ 1295.751923] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1295.759799] env[68217]: DEBUG oslo_vmware.api [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962294, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1295.813558] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.646s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1295.833822] env[68217]: INFO nova.scheduler.client.report [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Deleted allocations for instance b5fdce0e-465a-4cf0-9a15-313bba7a11e9 [ 1296.009451] env[68217]: DEBUG nova.compute.manager [req-092472e4-543f-42ec-a232-333933c8d1f5 req-dffd9e4b-711d-46e5-a518-76715ebb26d8 service nova] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Received event network-vif-deleted-7ca41605-8ab9-4d01-835b-70d47e78fce9 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1296.009683] env[68217]: INFO nova.compute.manager [req-092472e4-543f-42ec-a232-333933c8d1f5 req-dffd9e4b-711d-46e5-a518-76715ebb26d8 service nova] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Neutron deleted interface 7ca41605-8ab9-4d01-835b-70d47e78fce9; detaching it from the instance and deleting it from the info cache [ 1296.009823] env[68217]: DEBUG nova.network.neutron [req-092472e4-543f-42ec-a232-333933c8d1f5 req-dffd9e4b-711d-46e5-a518-76715ebb26d8 service nova] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.262101] env[68217]: DEBUG oslo_vmware.api [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962294, 'name': PowerOffVM_Task, 'duration_secs': 0.2206} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.262387] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1296.262554] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1296.262803] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f7b41c18-be01-4780-8775-8de0cfe73787 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.319628] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1296.319818] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1296.320078] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleting the datastore file [datastore2] 01c32252-f6e0-4cb0-966e-622872d49199 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1296.320281] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-73f461e5-8430-4b4b-95d7-06fb371c1c72 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.326245] env[68217]: DEBUG oslo_vmware.api [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1296.326245] env[68217]: value = "task-2962296" [ 1296.326245] env[68217]: _type = "Task" [ 1296.326245] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1296.333218] env[68217]: DEBUG oslo_vmware.api [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962296, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1296.341908] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d55df8f6-a1e9-4ceb-8c50-91f2ef745f0b tempest-ServerActionsTestOtherB-1476378927 tempest-ServerActionsTestOtherB-1476378927-project-member] Lock "b5fdce0e-465a-4cf0-9a15-313bba7a11e9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.946s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1296.488533] env[68217]: DEBUG nova.network.neutron [-] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1296.511937] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4b5d1233-171a-46fb-b6ac-ca43e8cf3ceb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.521627] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d23f3b03-a81d-4d7a-8c66-5c906712bda7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1296.546101] env[68217]: DEBUG nova.compute.manager [req-092472e4-543f-42ec-a232-333933c8d1f5 req-dffd9e4b-711d-46e5-a518-76715ebb26d8 service nova] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Detach interface failed, port_id=7ca41605-8ab9-4d01-835b-70d47e78fce9, reason: Instance d28bcf16-b081-4dc8-a975-2acaed222e15 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1296.835933] env[68217]: DEBUG oslo_vmware.api [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962296, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.123358} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1296.836209] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1296.836391] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1296.836564] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1296.836735] env[68217]: INFO nova.compute.manager [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1296.836993] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1296.837218] env[68217]: DEBUG nova.compute.manager [-] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1296.837315] env[68217]: DEBUG nova.network.neutron [-] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1296.991104] env[68217]: INFO nova.compute.manager [-] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Took 1.48 seconds to deallocate network for instance. 
[ 1297.498062] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1297.498062] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1297.498062] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1297.517801] env[68217]: INFO nova.scheduler.client.report [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Deleted allocations for instance d28bcf16-b081-4dc8-a975-2acaed222e15 [ 1297.570010] env[68217]: DEBUG nova.network.neutron [-] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1298.025936] env[68217]: DEBUG oslo_concurrency.lockutils [None req-3ac723e2-bf02-4bb9-a950-109146fa5728 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "d28bcf16-b081-4dc8-a975-2acaed222e15" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.127s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1298.040575] env[68217]: DEBUG nova.compute.manager [req-f9a31d00-31c7-4796-a349-4958066ff4f2 req-5cdf869b-5832-4e29-877f-c1ff9405ea20 service nova] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Received event network-vif-deleted-b00791ed-450f-419a-9745-945fdb5a3713 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1298.072485] env[68217]: INFO nova.compute.manager [-] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Took 1.24 seconds to deallocate network for instance. 
[ 1298.579067] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.579067] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1298.579382] env[68217]: DEBUG nova.objects.instance [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lazy-loading 'resources' on Instance uuid 01c32252-f6e0-4cb0-966e-622872d49199 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1298.848968] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "16244d37-2f3c-4b46-a449-7d0c679bab3f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1298.849207] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "16244d37-2f3c-4b46-a449-7d0c679bab3f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1299.146290] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f5891a3-402e-4ac7-a9ea-04a2082c22c3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.154028] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da14c546-63f2-4959-a44d-d72fc580caa2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.183087] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae0a5103-0006-4848-8863-3234b4e2e198 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.190191] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aecc7f37-0efd-40d4-8518-143ed627f43f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.204094] env[68217]: DEBUG nova.compute.provider_tree [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) 
update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1299.351460] env[68217]: DEBUG nova.compute.manager [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Starting instance... {{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1299.707353] env[68217]: DEBUG nova.scheduler.client.report [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1299.872970] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1300.215027] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.633s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1300.215027] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.342s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1300.216957] env[68217]: INFO nova.compute.claims [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1300.255390] env[68217]: INFO nova.scheduler.client.report [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleted allocations for instance 01c32252-f6e0-4cb0-966e-622872d49199 [ 1300.766225] env[68217]: DEBUG oslo_concurrency.lockutils [None req-4bf6d073-b5cd-481f-88f2-63e060218ccf tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "01c32252-f6e0-4cb0-966e-622872d49199" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.537s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1301.278499] env[68217]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26e68d3-c530-48ff-9a04-287d526cfb25 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.286139] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa13f255-346f-42a6-8d1c-94680418af5d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.316475] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d96b21bb-3725-47dc-a215-79447d312c3c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.323706] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a2bfdc6-7099-4cd5-9263-4bd3777d3fd6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.337905] env[68217]: DEBUG nova.compute.provider_tree [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1301.841642] env[68217]: DEBUG nova.scheduler.client.report [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1302.346450] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.131s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1302.346981] env[68217]: DEBUG nova.compute.manager [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Start building networks asynchronously for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1302.852019] env[68217]: DEBUG nova.compute.utils [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1302.853411] env[68217]: DEBUG nova.compute.manager [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1302.853582] env[68217]: DEBUG nova.network.neutron [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1302.898248] env[68217]: DEBUG nova.policy [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c867c8ebcaeb49ec91f751e2be5349b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '46f4c8c2f4764bd1b995396126b6aaf3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1303.162450] env[68217]: DEBUG nova.network.neutron [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Successfully created port: 3f237628-945e-4c8d-bf15-f59386ccb358 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1303.356674] env[68217]: DEBUG nova.compute.manager [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1304.366254] env[68217]: DEBUG nova.compute.manager [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1304.392598] env[68217]: DEBUG nova.virt.hardware [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1304.392838] env[68217]: DEBUG nova.virt.hardware [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1304.392992] env[68217]: DEBUG nova.virt.hardware [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1304.393188] env[68217]: DEBUG nova.virt.hardware [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1304.393334] env[68217]: DEBUG nova.virt.hardware [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1304.393477] env[68217]: DEBUG nova.virt.hardware [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1304.393684] env[68217]: DEBUG nova.virt.hardware [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1304.393838] env[68217]: DEBUG nova.virt.hardware [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1304.394008] env[68217]: DEBUG nova.virt.hardware [None 
req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1304.394189] env[68217]: DEBUG nova.virt.hardware [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1304.394358] env[68217]: DEBUG nova.virt.hardware [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1304.395227] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42287677-dc44-4203-9e2d-a38e9d44c89d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.403259] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-446c0cfb-b6c0-450c-813c-5852b8213246 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.810247] env[68217]: DEBUG nova.compute.manager [req-aa9e7cf3-6233-49ef-8887-9cc4a8026ffd req-6eda54d3-4cda-4cfb-9bcf-64f4542a5a82 service nova] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Received event network-vif-plugged-3f237628-945e-4c8d-bf15-f59386ccb358 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1304.810443] env[68217]: DEBUG oslo_concurrency.lockutils [req-aa9e7cf3-6233-49ef-8887-9cc4a8026ffd req-6eda54d3-4cda-4cfb-9bcf-64f4542a5a82 service nova] Acquiring lock "16244d37-2f3c-4b46-a449-7d0c679bab3f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1304.810650] env[68217]: DEBUG oslo_concurrency.lockutils [req-aa9e7cf3-6233-49ef-8887-9cc4a8026ffd req-6eda54d3-4cda-4cfb-9bcf-64f4542a5a82 service nova] Lock "16244d37-2f3c-4b46-a449-7d0c679bab3f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1304.810816] env[68217]: DEBUG oslo_concurrency.lockutils [req-aa9e7cf3-6233-49ef-8887-9cc4a8026ffd req-6eda54d3-4cda-4cfb-9bcf-64f4542a5a82 service nova] Lock "16244d37-2f3c-4b46-a449-7d0c679bab3f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1304.810978] env[68217]: DEBUG nova.compute.manager [req-aa9e7cf3-6233-49ef-8887-9cc4a8026ffd req-6eda54d3-4cda-4cfb-9bcf-64f4542a5a82 service nova] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] No waiting events found dispatching network-vif-plugged-3f237628-945e-4c8d-bf15-f59386ccb358 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1304.811152] env[68217]: WARNING nova.compute.manager [req-aa9e7cf3-6233-49ef-8887-9cc4a8026ffd 
req-6eda54d3-4cda-4cfb-9bcf-64f4542a5a82 service nova] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Received unexpected event network-vif-plugged-3f237628-945e-4c8d-bf15-f59386ccb358 for instance with vm_state building and task_state spawning. [ 1304.893186] env[68217]: DEBUG nova.network.neutron [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Successfully updated port: 3f237628-945e-4c8d-bf15-f59386ccb358 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1305.021063] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "a86015ea-fa6b-4cf8-9d79-273ffa02ec23" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1305.021063] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "a86015ea-fa6b-4cf8-9d79-273ffa02ec23" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1305.021063] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "a86015ea-fa6b-4cf8-9d79-273ffa02ec23-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1305.021063] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "a86015ea-fa6b-4cf8-9d79-273ffa02ec23-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1305.021063] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "a86015ea-fa6b-4cf8-9d79-273ffa02ec23-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1305.022942] env[68217]: INFO nova.compute.manager [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Terminating instance [ 1305.395901] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1305.396324] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1305.396324] env[68217]: DEBUG nova.network.neutron [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1305.527063] env[68217]: DEBUG nova.compute.manager [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1305.527063] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1305.527764] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-264ec7b7-fc7c-4c58-a305-026b2f3b50a9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.535583] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1305.535794] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6abf1e29-c86f-4e99-8186-7f3f43d32961 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1305.542059] env[68217]: DEBUG oslo_vmware.api [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1305.542059] env[68217]: value = "task-2962299" [ 1305.542059] env[68217]: _type = "Task" [ 1305.542059] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1305.548998] env[68217]: DEBUG oslo_vmware.api [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962299, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1305.926854] env[68217]: DEBUG nova.network.neutron [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1306.048036] env[68217]: DEBUG nova.network.neutron [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updating instance_info_cache with network_info: [{"id": "3f237628-945e-4c8d-bf15-f59386ccb358", "address": "fa:16:3e:26:d7:79", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f237628-94", "ovs_interfaceid": "3f237628-945e-4c8d-bf15-f59386ccb358", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.052974] env[68217]: DEBUG oslo_vmware.api [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962299, 'name': PowerOffVM_Task, 'duration_secs': 0.228413} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.053429] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1306.053652] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1306.053897] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-8740bc05-b118-4cda-9c01-09408a95a86a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.112127] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1306.112350] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1306.112531] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleting the datastore file [datastore1] a86015ea-fa6b-4cf8-9d79-273ffa02ec23 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1306.112823] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-880ee366-7206-46da-a645-20a8490127f6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.119103] env[68217]: DEBUG oslo_vmware.api [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for the task: (returnval){ [ 1306.119103] env[68217]: value = "task-2962301" [ 1306.119103] env[68217]: _type = "Task" [ 1306.119103] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.126471] env[68217]: DEBUG oslo_vmware.api [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962301, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.554246] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1306.554641] env[68217]: DEBUG nova.compute.manager [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Instance network_info: |[{"id": "3f237628-945e-4c8d-bf15-f59386ccb358", "address": "fa:16:3e:26:d7:79", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f237628-94", "ovs_interfaceid": "3f237628-945e-4c8d-bf15-f59386ccb358", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:2003}} [ 1306.554989] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:d7:79', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f78b07ea-f425-4622-84f4-706a5d8820a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f237628-945e-4c8d-bf15-f59386ccb358', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1306.562509] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1306.562731] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1306.562950] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d216034b-cff8-459f-8717-368029efea40 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.582784] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1306.582784] env[68217]: value = "task-2962302" [ 1306.582784] env[68217]: _type = "Task" [ 1306.582784] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.590017] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962302, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.629139] env[68217]: DEBUG oslo_vmware.api [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Task: {'id': task-2962301, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.130135} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.629555] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1306.629854] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1306.630165] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1306.630462] env[68217]: INFO nova.compute.manager [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1306.630845] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1306.631150] env[68217]: DEBUG nova.compute.manager [-] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1306.631305] env[68217]: DEBUG nova.network.neutron [-] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1306.836604] env[68217]: DEBUG nova.compute.manager [req-68519e95-9e99-4125-a92b-18cc6789cd78 req-c5dfab5a-85a7-4024-bbcf-4f5fd120dee3 service nova] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Received event network-changed-3f237628-945e-4c8d-bf15-f59386ccb358 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1306.836811] env[68217]: DEBUG nova.compute.manager [req-68519e95-9e99-4125-a92b-18cc6789cd78 req-c5dfab5a-85a7-4024-bbcf-4f5fd120dee3 service nova] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Refreshing instance network info cache due to event network-changed-3f237628-945e-4c8d-bf15-f59386ccb358. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1306.837033] env[68217]: DEBUG oslo_concurrency.lockutils [req-68519e95-9e99-4125-a92b-18cc6789cd78 req-c5dfab5a-85a7-4024-bbcf-4f5fd120dee3 service nova] Acquiring lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1306.837264] env[68217]: DEBUG oslo_concurrency.lockutils [req-68519e95-9e99-4125-a92b-18cc6789cd78 req-c5dfab5a-85a7-4024-bbcf-4f5fd120dee3 service nova] Acquired lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1306.837420] env[68217]: DEBUG nova.network.neutron [req-68519e95-9e99-4125-a92b-18cc6789cd78 req-c5dfab5a-85a7-4024-bbcf-4f5fd120dee3 service nova] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Refreshing network info cache for port 3f237628-945e-4c8d-bf15-f59386ccb358 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1307.094194] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962302, 'name': CreateVM_Task, 'duration_secs': 0.291203} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.094428] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1307.094921] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.095099] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1307.095413] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1307.095647] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca394674-3162-4b85-acbf-51f271d134ba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.099856] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1307.099856] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a04f1c-8d4e-a7c7-e3de-9d6751e59e67" [ 1307.099856] env[68217]: _type = "Task" [ 1307.099856] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.106771] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a04f1c-8d4e-a7c7-e3de-9d6751e59e67, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.387106] env[68217]: DEBUG nova.network.neutron [-] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1307.519114] env[68217]: DEBUG nova.network.neutron [req-68519e95-9e99-4125-a92b-18cc6789cd78 req-c5dfab5a-85a7-4024-bbcf-4f5fd120dee3 service nova] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updated VIF entry in instance network info cache for port 3f237628-945e-4c8d-bf15-f59386ccb358. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1307.519490] env[68217]: DEBUG nova.network.neutron [req-68519e95-9e99-4125-a92b-18cc6789cd78 req-c5dfab5a-85a7-4024-bbcf-4f5fd120dee3 service nova] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updating instance_info_cache with network_info: [{"id": "3f237628-945e-4c8d-bf15-f59386ccb358", "address": "fa:16:3e:26:d7:79", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f237628-94", "ovs_interfaceid": "3f237628-945e-4c8d-bf15-f59386ccb358", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1307.609532] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a04f1c-8d4e-a7c7-e3de-9d6751e59e67, 'name': SearchDatastore_Task, 'duration_secs': 0.010507} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1307.609919] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1307.610032] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1307.610301] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1307.610445] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1307.610618] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1307.610860] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c62b5e8e-b610-4322-ab5e-44b7c998211a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.619414] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1307.619539] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1307.620213] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-48f8282b-c04f-4d88-9a2c-e63759bd5fd8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1307.624659] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1307.624659] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5234d2ce-1365-72c7-d95b-0fd5fa4a3900" [ 1307.624659] env[68217]: _type = "Task" [ 1307.624659] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1307.631761] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5234d2ce-1365-72c7-d95b-0fd5fa4a3900, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.890190] env[68217]: INFO nova.compute.manager [-] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Took 1.26 seconds to deallocate network for instance. [ 1308.022541] env[68217]: DEBUG oslo_concurrency.lockutils [req-68519e95-9e99-4125-a92b-18cc6789cd78 req-c5dfab5a-85a7-4024-bbcf-4f5fd120dee3 service nova] Releasing lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1308.135274] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5234d2ce-1365-72c7-d95b-0fd5fa4a3900, 'name': SearchDatastore_Task, 'duration_secs': 0.009184} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.135999] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-12a3fa7c-210e-4d13-af36-7e679dc92253 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.140905] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1308.140905] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b7d252-ef1f-90e5-5ee5-4298a25206d1" [ 1308.140905] env[68217]: _type = "Task" [ 1308.140905] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.148304] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b7d252-ef1f-90e5-5ee5-4298a25206d1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.397142] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1308.397402] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1308.397624] env[68217]: DEBUG nova.objects.instance [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lazy-loading 'resources' on Instance uuid a86015ea-fa6b-4cf8-9d79-273ffa02ec23 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1308.651216] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b7d252-ef1f-90e5-5ee5-4298a25206d1, 'name': SearchDatastore_Task, 'duration_secs': 0.009969} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1308.651595] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1308.651643] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 16244d37-2f3c-4b46-a449-7d0c679bab3f/16244d37-2f3c-4b46-a449-7d0c679bab3f.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1308.651892] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-160248cf-1a2a-48da-9cb3-2eb9f9c912d1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.658056] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1308.658056] env[68217]: value = "task-2962303" [ 1308.658056] env[68217]: _type = "Task" [ 1308.658056] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1308.665066] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962303, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1308.876165] env[68217]: DEBUG nova.compute.manager [req-139c684a-05e5-42ae-8c9f-d2bcb60e98b2 req-2c379302-2690-466c-912d-c34c8af5bf24 service nova] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Received event network-vif-deleted-21f37b3b-0b0a-412e-8413-f3a1967f5c79 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1308.960159] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff0b856-9c2d-4d30-987a-4aee4695d4c9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1308.968677] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c6460d8-0979-44ce-bc0f-85aa28fe86cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.002405] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd319a34-15f0-455d-ad67-7c75e6193729 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.010543] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5feddf3-c4f8-4b92-be92-b4555642d728 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.025208] env[68217]: DEBUG nova.compute.provider_tree [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1309.168128] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962303, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.46447} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.168362] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 16244d37-2f3c-4b46-a449-7d0c679bab3f/16244d37-2f3c-4b46-a449-7d0c679bab3f.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1309.168575] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1309.168823] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2c093e7c-1a19-4c00-95bf-73abe3617b73 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.174548] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1309.174548] env[68217]: value = "task-2962304" [ 1309.174548] env[68217]: _type = "Task" [ 1309.174548] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.182613] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962304, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1309.528872] env[68217]: DEBUG nova.scheduler.client.report [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1309.688526] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962304, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064686} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1309.688899] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1309.689537] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aed2878e-4b1e-44e8-884b-24cf3c1132a1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.710833] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 16244d37-2f3c-4b46-a449-7d0c679bab3f/16244d37-2f3c-4b46-a449-7d0c679bab3f.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1309.711067] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f927ace7-35e3-4860-9567-475d33b306ed {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1309.729718] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1309.729718] env[68217]: value = "task-2962305" [ 1309.729718] env[68217]: _type = "Task" [ 1309.729718] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1309.737022] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962305, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.032861] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.635s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1310.052937] env[68217]: INFO nova.scheduler.client.report [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Deleted allocations for instance a86015ea-fa6b-4cf8-9d79-273ffa02ec23 [ 1310.239833] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962305, 'name': ReconfigVM_Task, 'duration_secs': 0.287893} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1310.240126] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 16244d37-2f3c-4b46-a449-7d0c679bab3f/16244d37-2f3c-4b46-a449-7d0c679bab3f.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1310.240758] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0742819a-54ee-447f-b778-ce088332d9f3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1310.247842] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1310.247842] env[68217]: value = "task-2962306" [ 1310.247842] env[68217]: _type = "Task" [ 1310.247842] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1310.256519] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962306, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1310.560790] env[68217]: DEBUG oslo_concurrency.lockutils [None req-b2d877a5-c33b-42cf-a602-038147d5a6d3 tempest-ServerActionsTestOtherA-393099776 tempest-ServerActionsTestOtherA-393099776-project-member] Lock "a86015ea-fa6b-4cf8-9d79-273ffa02ec23" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.540s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1310.757615] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962306, 'name': Rename_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.258330] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962306, 'name': Rename_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1311.758418] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962306, 'name': Rename_Task, 'duration_secs': 1.142307} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1311.758736] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1311.758903] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-8b1b7342-706a-4e42-8229-ea6a465c4cd2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1311.764857] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1311.764857] env[68217]: value = "task-2962307" [ 1311.764857] env[68217]: _type = "Task" [ 1311.764857] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1311.773477] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962307, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1312.274520] env[68217]: DEBUG oslo_vmware.api [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962307, 'name': PowerOnVM_Task, 'duration_secs': 0.425033} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1312.274775] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1312.274971] env[68217]: INFO nova.compute.manager [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Took 7.91 seconds to spawn the instance on the hypervisor. [ 1312.275939] env[68217]: DEBUG nova.compute.manager [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1312.275939] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82eb03f4-1bf8-4b54-982e-595e766e9fcb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1312.793898] env[68217]: INFO nova.compute.manager [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Took 12.94 seconds to build instance. 
[ 1312.981064] env[68217]: DEBUG nova.compute.manager [req-be7d3ea9-7f25-4b3a-bdb4-2744000e31ee req-50330d00-68c0-464e-a791-b99eb4240257 service nova] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Received event network-changed-3f237628-945e-4c8d-bf15-f59386ccb358 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1312.981267] env[68217]: DEBUG nova.compute.manager [req-be7d3ea9-7f25-4b3a-bdb4-2744000e31ee req-50330d00-68c0-464e-a791-b99eb4240257 service nova] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Refreshing instance network info cache due to event network-changed-3f237628-945e-4c8d-bf15-f59386ccb358. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1312.981545] env[68217]: DEBUG oslo_concurrency.lockutils [req-be7d3ea9-7f25-4b3a-bdb4-2744000e31ee req-50330d00-68c0-464e-a791-b99eb4240257 service nova] Acquiring lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1312.981710] env[68217]: DEBUG oslo_concurrency.lockutils [req-be7d3ea9-7f25-4b3a-bdb4-2744000e31ee req-50330d00-68c0-464e-a791-b99eb4240257 service nova] Acquired lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1312.981939] env[68217]: DEBUG nova.network.neutron [req-be7d3ea9-7f25-4b3a-bdb4-2744000e31ee req-50330d00-68c0-464e-a791-b99eb4240257 service nova] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Refreshing network info cache for port 3f237628-945e-4c8d-bf15-f59386ccb358 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1313.296397] env[68217]: DEBUG oslo_concurrency.lockutils [None req-92d8a958-8bfe-4b80-b5a4-950e87bca50f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "16244d37-2f3c-4b46-a449-7d0c679bab3f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.447s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1313.819173] env[68217]: DEBUG nova.network.neutron [req-be7d3ea9-7f25-4b3a-bdb4-2744000e31ee req-50330d00-68c0-464e-a791-b99eb4240257 service nova] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updated VIF entry in instance network info cache for port 3f237628-945e-4c8d-bf15-f59386ccb358. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1313.819626] env[68217]: DEBUG nova.network.neutron [req-be7d3ea9-7f25-4b3a-bdb4-2744000e31ee req-50330d00-68c0-464e-a791-b99eb4240257 service nova] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updating instance_info_cache with network_info: [{"id": "3f237628-945e-4c8d-bf15-f59386ccb358", "address": "fa:16:3e:26:d7:79", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f237628-94", "ovs_interfaceid": "3f237628-945e-4c8d-bf15-f59386ccb358", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1314.324732] env[68217]: DEBUG oslo_concurrency.lockutils [req-be7d3ea9-7f25-4b3a-bdb4-2744000e31ee req-50330d00-68c0-464e-a791-b99eb4240257 service nova] Releasing lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1316.075937] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Acquiring lock "2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1316.076223] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Lock "2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1316.580264] env[68217]: DEBUG nova.compute.manager [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1317.103101] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1317.103101] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1317.104486] env[68217]: INFO nova.compute.claims [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1318.162601] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34616956-4e63-49ca-afd0-34c5a4e85d92 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.170057] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de8e5b9-d810-4d25-a7d4-cf3c95ef1010 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.198245] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7c3616-2c8d-4b1f-8fc9-82f168fc8a30 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.204870] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe740dbf-6bf6-4f90-9fa0-4bab0af90358 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1318.218160] env[68217]: DEBUG nova.compute.provider_tree [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1318.724200] env[68217]: DEBUG nova.scheduler.client.report [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1319.228819] env[68217]: DEBUG oslo_concurrency.lockutils [None 
req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.126s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1319.229324] env[68217]: DEBUG nova.compute.manager [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1319.734241] env[68217]: DEBUG nova.compute.utils [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1319.735621] env[68217]: DEBUG nova.compute.manager [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1319.735793] env[68217]: DEBUG nova.network.neutron [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1319.770099] env[68217]: DEBUG nova.policy [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ba383044247645198e47d7e2938d9254', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c6466fb4e14840cbb68ade36b0943858', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1320.012031] env[68217]: DEBUG nova.network.neutron [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Successfully created port: 50f741f6-dd93-45a5-9a90-c14276a7fad7 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1320.238801] env[68217]: DEBUG nova.compute.manager [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1321.249022] env[68217]: DEBUG nova.compute.manager [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1321.275909] env[68217]: DEBUG nova.virt.hardware [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1321.276172] env[68217]: DEBUG nova.virt.hardware [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1321.276320] env[68217]: DEBUG nova.virt.hardware [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1321.276530] env[68217]: DEBUG nova.virt.hardware [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1321.276681] env[68217]: DEBUG nova.virt.hardware [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1321.276828] env[68217]: DEBUG nova.virt.hardware [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1321.277045] env[68217]: DEBUG nova.virt.hardware [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 
1321.277209] env[68217]: DEBUG nova.virt.hardware [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1321.277374] env[68217]: DEBUG nova.virt.hardware [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1321.277537] env[68217]: DEBUG nova.virt.hardware [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1321.277705] env[68217]: DEBUG nova.virt.hardware [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1321.278571] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d433fdd8-6ad9-476c-8503-3ee4743add11 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.286731] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a9c2f43-5654-4520-a0ff-5df2aa8d188b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1321.361771] env[68217]: DEBUG nova.compute.manager [req-7e73eaf6-d2a2-45b5-bc8a-8ae8e595f59f req-aff9c79d-ee10-4e00-b5b5-5cc8253cbf6d service nova] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Received event network-vif-plugged-50f741f6-dd93-45a5-9a90-c14276a7fad7 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1321.361988] env[68217]: DEBUG oslo_concurrency.lockutils [req-7e73eaf6-d2a2-45b5-bc8a-8ae8e595f59f req-aff9c79d-ee10-4e00-b5b5-5cc8253cbf6d service nova] Acquiring lock "2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1321.362211] env[68217]: DEBUG oslo_concurrency.lockutils [req-7e73eaf6-d2a2-45b5-bc8a-8ae8e595f59f req-aff9c79d-ee10-4e00-b5b5-5cc8253cbf6d service nova] Lock "2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1321.362446] env[68217]: DEBUG oslo_concurrency.lockutils [req-7e73eaf6-d2a2-45b5-bc8a-8ae8e595f59f req-aff9c79d-ee10-4e00-b5b5-5cc8253cbf6d service nova] Lock "2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1321.362632] env[68217]: DEBUG nova.compute.manager 
[req-7e73eaf6-d2a2-45b5-bc8a-8ae8e595f59f req-aff9c79d-ee10-4e00-b5b5-5cc8253cbf6d service nova] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] No waiting events found dispatching network-vif-plugged-50f741f6-dd93-45a5-9a90-c14276a7fad7 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1321.362794] env[68217]: WARNING nova.compute.manager [req-7e73eaf6-d2a2-45b5-bc8a-8ae8e595f59f req-aff9c79d-ee10-4e00-b5b5-5cc8253cbf6d service nova] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Received unexpected event network-vif-plugged-50f741f6-dd93-45a5-9a90-c14276a7fad7 for instance with vm_state building and task_state spawning. [ 1321.437849] env[68217]: DEBUG nova.network.neutron [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Successfully updated port: 50f741f6-dd93-45a5-9a90-c14276a7fad7 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1321.942737] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Acquiring lock "refresh_cache-2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1321.942737] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Acquired lock "refresh_cache-2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1321.942737] env[68217]: DEBUG nova.network.neutron [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1322.473182] env[68217]: DEBUG nova.network.neutron [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1322.591519] env[68217]: DEBUG nova.network.neutron [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Updating instance_info_cache with network_info: [{"id": "50f741f6-dd93-45a5-9a90-c14276a7fad7", "address": "fa:16:3e:28:b9:37", "network": {"id": "204aa7e2-fee6-44a9-ae06-ede205cb4427", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-307448272-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6466fb4e14840cbb68ade36b0943858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "edd47158-6f4b-44a1-8e82-0411205ad299", "external-id": "nsx-vlan-transportzone-587", "segmentation_id": 587, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50f741f6-dd", "ovs_interfaceid": "50f741f6-dd93-45a5-9a90-c14276a7fad7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1323.094010] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Releasing lock "refresh_cache-2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1323.094356] env[68217]: DEBUG nova.compute.manager [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Instance network_info: |[{"id": "50f741f6-dd93-45a5-9a90-c14276a7fad7", "address": "fa:16:3e:28:b9:37", "network": {"id": "204aa7e2-fee6-44a9-ae06-ede205cb4427", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-307448272-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6466fb4e14840cbb68ade36b0943858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "edd47158-6f4b-44a1-8e82-0411205ad299", "external-id": "nsx-vlan-transportzone-587", "segmentation_id": 587, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50f741f6-dd", "ovs_interfaceid": "50f741f6-dd93-45a5-9a90-c14276a7fad7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1323.094779] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:b9:37', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'edd47158-6f4b-44a1-8e82-0411205ad299', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '50f741f6-dd93-45a5-9a90-c14276a7fad7', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1323.102196] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Creating folder: Project (c6466fb4e14840cbb68ade36b0943858). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1323.102453] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f9781cb2-c01f-4cae-b2cd-66b1679a6411 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.113602] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Created folder: Project (c6466fb4e14840cbb68ade36b0943858) in parent group-v594094. [ 1323.113771] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Creating folder: Instances. Parent ref: group-v594433. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1323.113984] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6c24c9d4-e1f7-45f3-9175-aceac5cd923f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.124437] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Created folder: Instances in parent group-v594433. [ 1323.124645] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1323.124815] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1323.124997] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-18ffc1d0-7cc8-45e6-903d-1b2c77d207b0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.142452] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1323.142452] env[68217]: value = "task-2962310" [ 1323.142452] env[68217]: _type = "Task" [ 1323.142452] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.150213] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962310, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1323.388895] env[68217]: DEBUG nova.compute.manager [req-174e6cc9-f77b-4321-b02d-770a9983efaa req-5bb9f636-e61d-433e-bf5c-9e74bcee6f1a service nova] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Received event network-changed-50f741f6-dd93-45a5-9a90-c14276a7fad7 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1323.389059] env[68217]: DEBUG nova.compute.manager [req-174e6cc9-f77b-4321-b02d-770a9983efaa req-5bb9f636-e61d-433e-bf5c-9e74bcee6f1a service nova] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Refreshing instance network info cache due to event network-changed-50f741f6-dd93-45a5-9a90-c14276a7fad7. {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1323.389296] env[68217]: DEBUG oslo_concurrency.lockutils [req-174e6cc9-f77b-4321-b02d-770a9983efaa req-5bb9f636-e61d-433e-bf5c-9e74bcee6f1a service nova] Acquiring lock "refresh_cache-2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.389458] env[68217]: DEBUG oslo_concurrency.lockutils [req-174e6cc9-f77b-4321-b02d-770a9983efaa req-5bb9f636-e61d-433e-bf5c-9e74bcee6f1a service nova] Acquired lock "refresh_cache-2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1323.389637] env[68217]: DEBUG nova.network.neutron [req-174e6cc9-f77b-4321-b02d-770a9983efaa req-5bb9f636-e61d-433e-bf5c-9e74bcee6f1a service nova] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Refreshing network info cache for port 50f741f6-dd93-45a5-9a90-c14276a7fad7 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1323.652527] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962310, 'name': CreateVM_Task, 'duration_secs': 0.305818} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1323.652812] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1323.653360] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1323.653528] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1323.653832] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1323.654091] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-87cef9e5-7b36-4bf7-9d2e-d3d9544d1859 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1323.658269] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Waiting for the task: (returnval){ [ 1323.658269] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fc945e-9091-f901-511c-a2233806f0fd" [ 1323.658269] env[68217]: _type = "Task" [ 1323.658269] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1323.665761] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fc945e-9091-f901-511c-a2233806f0fd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.073247] env[68217]: DEBUG nova.network.neutron [req-174e6cc9-f77b-4321-b02d-770a9983efaa req-5bb9f636-e61d-433e-bf5c-9e74bcee6f1a service nova] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Updated VIF entry in instance network info cache for port 50f741f6-dd93-45a5-9a90-c14276a7fad7. 
{{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1324.073612] env[68217]: DEBUG nova.network.neutron [req-174e6cc9-f77b-4321-b02d-770a9983efaa req-5bb9f636-e61d-433e-bf5c-9e74bcee6f1a service nova] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Updating instance_info_cache with network_info: [{"id": "50f741f6-dd93-45a5-9a90-c14276a7fad7", "address": "fa:16:3e:28:b9:37", "network": {"id": "204aa7e2-fee6-44a9-ae06-ede205cb4427", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-307448272-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c6466fb4e14840cbb68ade36b0943858", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "edd47158-6f4b-44a1-8e82-0411205ad299", "external-id": "nsx-vlan-transportzone-587", "segmentation_id": 587, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap50f741f6-dd", "ovs_interfaceid": "50f741f6-dd93-45a5-9a90-c14276a7fad7", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1324.168845] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fc945e-9091-f901-511c-a2233806f0fd, 'name': SearchDatastore_Task, 'duration_secs': 0.00912} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.169092] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1324.169329] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1324.169564] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1324.169710] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1324.169885] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1324.170145] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6a2de904-7278-4707-af63-67a9f9bfdd92 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.178029] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1324.178202] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1324.178843] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d763c4e9-cc13-4617-b7df-156865b3ca1d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.183247] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Waiting for the task: (returnval){ [ 1324.183247] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52797fb3-a7f4-ab40-2b56-13d2955f196e" [ 1324.183247] env[68217]: _type = "Task" [ 1324.183247] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.190303] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52797fb3-a7f4-ab40-2b56-13d2955f196e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1324.576695] env[68217]: DEBUG oslo_concurrency.lockutils [req-174e6cc9-f77b-4321-b02d-770a9983efaa req-5bb9f636-e61d-433e-bf5c-9e74bcee6f1a service nova] Releasing lock "refresh_cache-2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1324.693631] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52797fb3-a7f4-ab40-2b56-13d2955f196e, 'name': SearchDatastore_Task, 'duration_secs': 0.007715} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1324.694396] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-daba9098-cbd8-4805-8a06-7094a60d9f9f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1324.699138] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Waiting for the task: (returnval){ [ 1324.699138] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5272702c-5dfe-fe23-0b63-18bf2b1eb655" [ 1324.699138] env[68217]: _type = "Task" [ 1324.699138] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1324.706225] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5272702c-5dfe-fe23-0b63-18bf2b1eb655, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.208931] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5272702c-5dfe-fe23-0b63-18bf2b1eb655, 'name': SearchDatastore_Task, 'duration_secs': 0.009055} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.209203] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1325.209462] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2/2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1325.209708] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-41e90b1b-63b4-4dd2-b5e5-564434f0816a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.216278] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Waiting for the task: (returnval){ [ 1325.216278] env[68217]: value = "task-2962311" [ 1325.216278] env[68217]: _type = "Task" [ 1325.216278] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.223380] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': task-2962311, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1325.725432] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': task-2962311, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.431125} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1325.725805] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2/2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1325.725881] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1325.726108] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-17375de9-1e23-49c7-b745-a51ec49a3f34 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1325.733467] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Waiting for the task: (returnval){ [ 1325.733467] env[68217]: value = "task-2962312" [ 1325.733467] env[68217]: _type = "Task" [ 1325.733467] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1325.740205] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': task-2962312, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.243058] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': task-2962312, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062251} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1326.243261] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1326.244061] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bcf0a05-7998-4986-9c12-1de69a4aa4e4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.266394] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Reconfiguring VM instance instance-0000007e to attach disk [datastore1] 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2/2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1326.266694] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-abe48dd4-5aae-4651-9be5-ed9d9ebb6ba2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1326.287085] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Waiting for the task: (returnval){ [ 1326.287085] env[68217]: value = "task-2962313" [ 1326.287085] env[68217]: _type = "Task" [ 1326.287085] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1326.294891] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': task-2962313, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1326.797438] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': task-2962313, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.298687] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': task-2962313, 'name': ReconfigVM_Task} progress is 99%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1327.798632] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': task-2962313, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.299154] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': task-2962313, 'name': ReconfigVM_Task, 'duration_secs': 1.620658} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.299465] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Reconfigured VM instance instance-0000007e to attach disk [datastore1] 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2/2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1328.300100] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ecc4c19a-46a9-4d6e-926c-00acfb4f03fe {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.306357] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Waiting for the task: (returnval){ [ 1328.306357] env[68217]: value = "task-2962314" [ 1328.306357] env[68217]: _type = "Task" [ 1328.306357] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.313709] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': task-2962314, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1328.815541] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': task-2962314, 'name': Rename_Task, 'duration_secs': 0.12782} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1328.815947] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1328.816101] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-fbc25e73-d33f-434a-82ce-199dc5e83d9e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1328.822102] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Waiting for the task: (returnval){ [ 1328.822102] env[68217]: value = "task-2962315" [ 1328.822102] env[68217]: _type = "Task" [ 1328.822102] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1328.828976] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': task-2962315, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1329.331577] env[68217]: DEBUG oslo_vmware.api [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': task-2962315, 'name': PowerOnVM_Task, 'duration_secs': 0.427836} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1329.331856] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1329.332061] env[68217]: INFO nova.compute.manager [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Took 8.08 seconds to spawn the instance on the hypervisor. [ 1329.332245] env[68217]: DEBUG nova.compute.manager [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1329.332996] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be4ac378-acc1-4ca2-a83a-f52060b9609e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1329.849050] env[68217]: INFO nova.compute.manager [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Took 12.76 seconds to build instance. 
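The spawn traced above is a chain of asynchronous vSphere calls: each "Invoking ..._Task" entry (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) returns a task reference, and oslo.vmware then blocks on it via wait_for_task (api.py:397), which repeatedly runs _poll_task (api.py:434) until the task finishes, producing the "progress is N%" and "duration_secs" lines. A minimal sketch of that polling pattern, using only the standard library; the names and the get_task_info callable are illustrative, not oslo.vmware's actual internals:

import time


class TaskFailed(Exception):
    """Raised when a polled task ends in an error state."""


def wait_for_task(get_task_info, task_ref, poll_interval=0.5, timeout=300.0):
    """Poll task_ref via get_task_info until it succeeds, fails or times out.

    get_task_info is a stand-in for reading the task's info property from
    vCenter; it is assumed to return an object with state, progress and
    error attributes.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)
        if info.state == "success":
            return info  # the caller can log the duration, as the entries above do
        if info.state == "error":
            raise TaskFailed(str(info.error))
        # queued/running: report progress (0%, 14%, 99%, ...) and retry
        print(f"Task {task_ref}: progress is {info.progress}%")
        time.sleep(poll_interval)
    raise TimeoutError(f"Task {task_ref} did not complete within {timeout}s")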
[ 1330.351913] env[68217]: DEBUG oslo_concurrency.lockutils [None req-a9016b9a-f9c4-4a45-a16e-754a239ecdb4 tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Lock "2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.276s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1330.980249] env[68217]: DEBUG oslo_concurrency.lockutils [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Acquiring lock "2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1330.980662] env[68217]: DEBUG oslo_concurrency.lockutils [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Lock "2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.980740] env[68217]: DEBUG oslo_concurrency.lockutils [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Acquiring lock "2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1330.980926] env[68217]: DEBUG oslo_concurrency.lockutils [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Lock "2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1330.981109] env[68217]: DEBUG oslo_concurrency.lockutils [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Lock "2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1330.983271] env[68217]: INFO nova.compute.manager [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Terminating instance [ 1331.487051] env[68217]: DEBUG nova.compute.manager [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1331.487322] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1331.488291] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47744776-f365-4ed2-9bba-5ab9b26056e0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.496458] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1331.496681] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-42c67860-1346-4887-9f40-df21c0318913 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1331.502402] env[68217]: DEBUG oslo_vmware.api [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Waiting for the task: (returnval){ [ 1331.502402] env[68217]: value = "task-2962316" [ 1331.502402] env[68217]: _type = "Task" [ 1331.502402] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1331.509995] env[68217]: DEBUG oslo_vmware.api [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': task-2962316, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.011309] env[68217]: DEBUG oslo_vmware.api [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': task-2962316, 'name': PowerOffVM_Task, 'duration_secs': 0.167945} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.011650] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1332.011777] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1332.012035] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-97f2237d-3e65-4e99-bdaa-6ebaea900b26 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.067217] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1332.067444] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1332.067602] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Deleting the datastore file [datastore1] 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1332.067862] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0c733469-2fd0-4092-81ff-97fa24c82f5e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1332.073799] env[68217]: DEBUG oslo_vmware.api [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Waiting for the task: (returnval){ [ 1332.073799] env[68217]: value = "task-2962318" [ 1332.073799] env[68217]: _type = "Task" [ 1332.073799] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1332.080823] env[68217]: DEBUG oslo_vmware.api [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': task-2962318, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1332.583920] env[68217]: DEBUG oslo_vmware.api [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Task: {'id': task-2962318, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.111376} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1332.586041] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1332.586041] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1332.586041] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1332.586041] env[68217]: INFO nova.compute.manager [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1332.586041] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1332.586041] env[68217]: DEBUG nova.compute.manager [-] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1332.586041] env[68217]: DEBUG nova.network.neutron [-] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1332.849608] env[68217]: DEBUG nova.compute.manager [req-12404db4-ee23-44cc-8863-981f815bc19c req-593963c8-480f-4a45-a713-7fa01848c434 service nova] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Received event network-vif-deleted-50f741f6-dd93-45a5-9a90-c14276a7fad7 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1332.849677] env[68217]: INFO nova.compute.manager [req-12404db4-ee23-44cc-8863-981f815bc19c req-593963c8-480f-4a45-a713-7fa01848c434 service nova] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Neutron deleted interface 50f741f6-dd93-45a5-9a90-c14276a7fad7; detaching it from the instance and deleting it from the info cache [ 1332.849891] env[68217]: DEBUG nova.network.neutron [req-12404db4-ee23-44cc-8863-981f815bc19c req-593963c8-480f-4a45-a713-7fa01848c434 service nova] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1333.331059] env[68217]: DEBUG nova.network.neutron [-] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1333.352504] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-2066f229-e0a6-4988-91de-5d122d33ef99 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.362362] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba297ada-7e65-4223-a3d5-74c99dd5c230 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1333.387013] env[68217]: DEBUG nova.compute.manager [req-12404db4-ee23-44cc-8863-981f815bc19c req-593963c8-480f-4a45-a713-7fa01848c434 service nova] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Detach interface failed, port_id=50f741f6-dd93-45a5-9a90-c14276a7fad7, reason: Instance 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1333.833244] env[68217]: INFO nova.compute.manager [-] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Took 1.25 seconds to deallocate network for instance. 
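Every Acquiring/Acquired/Releasing triple in this log comes from oslo.concurrency: the frames named "lock" (lockutils.py:313/316/334) are the lockutils.lock context manager, while the frames named "inner" that report the "waited"/"held" timings (lockutils.py:405/410/424) come from the lockutils.synchronized decorator wrapping inner functions such as do_terminate_instance above. A short sketch of both usages, assuming only the public oslo.concurrency API (the function bodies are placeholders):

from oslo_concurrency import lockutils


@lockutils.synchronized("2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2")
def do_terminate_instance():
    # The decorator serializes callers on the per-instance lock and logs the
    # 'acquired ... :: waited' / 'released ... :: held' pairs seen above.
    pass


def refresh_instance_network_cache():
    # Used as a context manager, the same primitive produces the plain
    # Acquiring/Acquired/Releasing lines around the network info cache refresh.
    with lockutils.lock("refresh_cache-2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2"):
        pass  # refresh and update the cache while the lock is held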
[ 1334.339522] env[68217]: DEBUG oslo_concurrency.lockutils [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1334.339826] env[68217]: DEBUG oslo_concurrency.lockutils [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1334.340025] env[68217]: DEBUG nova.objects.instance [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Lazy-loading 'resources' on Instance uuid 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1334.895877] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98b420be-7984-40b9-835d-484694802443 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.903529] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d8035f0-fda0-4dd4-9f28-07d7de83bb61 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.933697] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79899400-cab0-4c8c-9f40-6a7d1640bb7b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.941047] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d84f6816-4f3c-4737-981f-f475ecc5f8a5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1334.953835] env[68217]: DEBUG nova.compute.provider_tree [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1335.456795] env[68217]: DEBUG nova.scheduler.client.report [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1335.962042] env[68217]: DEBUG oslo_concurrency.lockutils [None req-dac1eb40-c647-4edb-b203-100819ebf70f 
tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.622s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1335.980243] env[68217]: INFO nova.scheduler.client.report [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Deleted allocations for instance 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2 [ 1336.489231] env[68217]: DEBUG oslo_concurrency.lockutils [None req-dac1eb40-c647-4edb-b203-100819ebf70f tempest-ServerMetadataTestJSON-166387806 tempest-ServerMetadataTestJSON-166387806-project-member] Lock "2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.509s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1339.448154] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Acquiring lock "d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1339.448470] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Lock "d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1339.950641] env[68217]: DEBUG nova.compute.manager [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1340.471481] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1340.471754] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1340.473218] env[68217]: INFO nova.compute.claims [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1341.527373] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ce36dad-5ef7-4190-be4a-ad0178c61be4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.534592] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9c35a0b-c3a5-4f71-b8b9-3414ca0d548e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.564909] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fab978b1-1bcd-4e06-9f84-3b5b5d0d8f5d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.571517] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a54347a-39dc-4149-b62f-1e8279e0e898 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1341.584134] env[68217]: DEBUG nova.compute.provider_tree [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1342.087479] env[68217]: DEBUG nova.scheduler.client.report [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1342.593025] env[68217]: DEBUG oslo_concurrency.lockutils [None 
req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.121s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1342.593541] env[68217]: DEBUG nova.compute.manager [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1343.098171] env[68217]: DEBUG nova.compute.utils [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1343.099553] env[68217]: DEBUG nova.compute.manager [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Not allocating networking since 'none' was specified. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1343.601765] env[68217]: DEBUG nova.compute.manager [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1344.611050] env[68217]: DEBUG nova.compute.manager [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1344.637028] env[68217]: DEBUG nova.virt.hardware [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1344.637278] env[68217]: DEBUG nova.virt.hardware [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1344.637435] env[68217]: DEBUG nova.virt.hardware [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1344.637628] env[68217]: DEBUG nova.virt.hardware [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1344.637798] env[68217]: DEBUG nova.virt.hardware [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1344.637944] env[68217]: DEBUG nova.virt.hardware [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1344.638161] env[68217]: DEBUG nova.virt.hardware [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1344.638320] env[68217]: DEBUG nova.virt.hardware [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1344.638484] env[68217]: DEBUG nova.virt.hardware [None 
req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1344.638642] env[68217]: DEBUG nova.virt.hardware [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1344.638809] env[68217]: DEBUG nova.virt.hardware [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1344.639704] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f88ef83-a26d-4ab9-bf19-4befed594ff8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.650063] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e809af75-ad50-454b-afa9-614a9d79888c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.663474] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Instance VIF info [] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1344.668946] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Creating folder: Project (a048640f0c654ec1a448ec808557eff8). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1344.669203] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c2d3f242-635b-467a-9654-65dba598b277 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.678165] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Created folder: Project (a048640f0c654ec1a448ec808557eff8) in parent group-v594094. [ 1344.678312] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Creating folder: Instances. Parent ref: group-v594436. 
{{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1344.678516] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-11de2493-fd1f-4540-a54f-822ff52e86c9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.685809] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Created folder: Instances in parent group-v594436. [ 1344.686029] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1344.686206] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1344.686383] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fbceb600-2b17-4d5d-b950-24f9a44c9659 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1344.701290] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1344.701290] env[68217]: value = "task-2962321" [ 1344.701290] env[68217]: _type = "Task" [ 1344.701290] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1344.707773] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962321, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.212281] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962321, 'name': CreateVM_Task, 'duration_secs': 0.253005} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.212281] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1345.212688] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.212847] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1345.213181] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1345.213421] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-44810568-cd22-408c-a23d-38082976377e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.217832] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1345.217832] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529dba3c-ba22-5029-d5c2-424731944b2e" [ 1345.217832] env[68217]: _type = "Task" [ 1345.217832] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.224912] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529dba3c-ba22-5029-d5c2-424731944b2e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1345.728230] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529dba3c-ba22-5029-d5c2-424731944b2e, 'name': SearchDatastore_Task, 'duration_secs': 0.009909} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1345.728596] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1345.728769] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1345.729023] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1345.729172] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1345.729356] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1345.729607] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fbc8f7c3-71fc-4d71-9ac4-c4d6e57b00d1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.738105] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1345.738289] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1345.738952] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a889ff20-ff7c-4493-9637-672953d0fb36 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1345.744015] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1345.744015] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f3ca36-2447-105f-fcd2-0f3bc42c4d47" [ 1345.744015] env[68217]: _type = "Task" [ 1345.744015] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1345.750974] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f3ca36-2447-105f-fcd2-0f3bc42c4d47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.253678] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52f3ca36-2447-105f-fcd2-0f3bc42c4d47, 'name': SearchDatastore_Task, 'duration_secs': 0.009243} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.254438] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8cb07b15-ca2e-4607-a2e4-e8e975090d60 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.259732] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1346.259732] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ff1d3a-43d1-be6b-24e2-27a78b79cd18" [ 1346.259732] env[68217]: _type = "Task" [ 1346.259732] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.266692] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ff1d3a-43d1-be6b-24e2-27a78b79cd18, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1346.769756] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52ff1d3a-43d1-be6b-24e2-27a78b79cd18, 'name': SearchDatastore_Task, 'duration_secs': 0.009263} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1346.770145] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1346.770295] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52/d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1346.770540] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-865c5dd3-5e86-485d-9e35-70605bca8088 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1346.776535] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1346.776535] env[68217]: value = "task-2962322" [ 1346.776535] env[68217]: _type = "Task" [ 1346.776535] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1346.783254] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962322, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.286278] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962322, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.471496} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.286532] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52/d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1347.286748] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1347.286989] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-58c021c9-60a5-42f2-86fa-ca24b21523ab {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.292649] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1347.292649] env[68217]: value = "task-2962323" [ 1347.292649] env[68217]: _type = "Task" [ 1347.292649] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.299503] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962323, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1347.801884] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962323, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066204} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1347.802336] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1347.802920] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076a86a9-f8df-45f4-bda7-a0c8ba0cccfd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.822024] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52/d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1347.822258] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0650aa0-fb9c-4651-8b55-affdbe07f54a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.841397] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1347.841397] env[68217]: value = "task-2962324" [ 1347.841397] env[68217]: _type = "Task" [ 1347.841397] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.848349] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962324, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.350692] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962324, 'name': ReconfigVM_Task, 'duration_secs': 0.273547} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.350987] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Reconfigured VM instance instance-0000007f to attach disk [datastore1] d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52/d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1348.351602] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c4bf6a45-1761-48bf-bb8c-b405d82d33da {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.357118] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1348.357118] env[68217]: value = "task-2962325" [ 1348.357118] env[68217]: _type = "Task" [ 1348.357118] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.364417] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962325, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.867085] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962325, 'name': Rename_Task, 'duration_secs': 0.135052} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1348.867466] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1348.867591] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-74ab8d5d-7589-47a0-b913-058b8ab9021f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.873371] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1348.873371] env[68217]: value = "task-2962326" [ 1348.873371] env[68217]: _type = "Task" [ 1348.873371] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.881299] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962326, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1349.348790] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1349.382820] env[68217]: DEBUG oslo_vmware.api [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962326, 'name': PowerOnVM_Task, 'duration_secs': 0.421123} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.383061] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1349.383301] env[68217]: INFO nova.compute.manager [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Took 4.77 seconds to spawn the instance on the hypervisor. [ 1349.383484] env[68217]: DEBUG nova.compute.manager [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1349.384263] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5d33286-d769-44a5-be75-397a11d43452 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.852724] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1349.852724] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1349.852883] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1349.852982] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68217) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1349.853840] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4f020ffe-96fa-4b14-b192-10518a892624 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.862140] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e31ab0f4-7408-4535-8923-56ddacb2207e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.876154] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b5e8e76-3050-42a3-a540-d4c2d2019b03 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.882683] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf1cc2b3-0c34-468b-ba7c-b53116fedda4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.915149] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180855MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=68217) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1349.915245] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1349.915451] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1349.920318] env[68217]: INFO nova.compute.manager [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Took 9.46 seconds to build instance. 
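
Note: the spawn sequence recorded above (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is driven by the same wait-for-task pattern the log prints as "Waiting for the task ... progress is 0% ... completed successfully". The following is only a minimal illustrative sketch of such a polling loop; poll_task and TaskFailed are hypothetical placeholders and not oslo.vmware's actual API.

import time

class TaskFailed(Exception):
    """Illustrative error raised when a polled task reports an error state."""

def wait_for_task(poll_task, interval=0.5):
    # poll_task() is a hypothetical callable returning (state, progress),
    # standing in for a vCenter task query; it is not part of oslo.vmware.
    while True:
        state, progress = poll_task()
        if state == "success":
            return
        if state == "error":
            raise TaskFailed("task reported an error")
        # The log above shows roughly half-second gaps between polls.
        time.sleep(interval)

In the log, each "Task: {'id': ..., 'name': ...} ... completed successfully" entry corresponds to one such loop finishing for the named vCenter task.
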
[ 1350.423112] env[68217]: DEBUG oslo_concurrency.lockutils [None req-c3645fdc-42bd-4c21-9535-f80267956581 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Lock "d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 10.974s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1350.453842] env[68217]: INFO nova.compute.manager [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Rebuilding instance [ 1350.492910] env[68217]: DEBUG nova.compute.manager [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1350.493769] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b56b3fe-0e92-46d1-b5f0-3d5843d8a119 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.924830] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Skipping migration as instance is neither resizing nor live-migrating. {{(pid=68217) _update_usage_from_migrations /opt/stack/nova/nova/compute/resource_tracker.py:1563}} [ 1350.948372] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 7056fb29-2a2f-4275-a411-4d5f3fcb421f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1350.948524] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1350.948648] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Migration c330fdf2-7619-4878-a5f7-bd6b51793328 is active on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1712}} [ 1350.948767] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 16244d37-2f3c-4b46-a449-7d0c679bab3f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1350.948955] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Total usable vcpus: 48, total allocated vcpus: 3 {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1350.949096] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1088MB phys_disk=200GB used_disk=3GB total_vcpus=48 used_vcpus=3 pci_stats=[] {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1351.000618] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20a9f035-57c5-47e7-9493-d335b4ed40cb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.009778] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84384eef-b9e7-4399-9eb3-bbe310a970cd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.039249] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0079383c-4da1-4e84-a601-7178922802a4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.045906] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c2348a8-d880-4ff3-a282-3fc8cb6368eb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.059350] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1351.163586] env[68217]: DEBUG nova.compute.manager [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Stashing vm_state: active {{(pid=68217) _prep_resize /opt/stack/nova/nova/compute/manager.py:6136}} [ 1351.506548] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1351.506850] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7516bb30-1be7-4ebf-81e0-972490587217 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.514383] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1351.514383] env[68217]: value = "task-2962327" [ 1351.514383] env[68217]: _type = "Task" [ 1351.514383] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1351.522135] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962327, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1351.562450] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1351.680124] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1352.024013] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962327, 'name': PowerOffVM_Task, 'duration_secs': 0.111851} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.024444] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1352.024910] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1352.025654] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e548b5d-aa71-4252-9108-546607a623fe {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.031729] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1352.031937] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bec3afb5-3460-4609-94f5-bf388ed90c1c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.057048] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1352.057260] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1352.057437] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Deleting the datastore file [datastore1] d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1352.057674] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-95d43875-5335-462d-b9a3-dd653d67aead {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.063528] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1352.063528] env[68217]: value = "task-2962329" [ 1352.063528] env[68217]: _type = "Task" [ 1352.063528] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.067226] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68217) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1352.067409] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.152s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1352.067697] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.388s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1352.073610] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962329, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.575073] env[68217]: INFO nova.compute.claims [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1352.578441] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962329, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.090766} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.578809] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1352.578995] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1352.579236] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1353.081084] env[68217]: INFO nova.compute.resource_tracker [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updating resource usage from migration c330fdf2-7619-4878-a5f7-bd6b51793328 [ 1353.137583] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c0e9a81-46dd-4ff5-9b58-c0993be1443a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.145015] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7eacdbef-ebf7-4897-948a-34363a9d1f8c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.175413] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe4cb5ec-5c21-44c6-afcc-2b808307e6aa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.182600] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba722ea4-b83b-4657-92b2-d0af6a380b09 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.195175] env[68217]: DEBUG nova.compute.provider_tree [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1353.614668] env[68217]: DEBUG nova.virt.hardware [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1353.614876] env[68217]: DEBUG nova.virt.hardware [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1353.615049] env[68217]: DEBUG nova.virt.hardware [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1353.615232] env[68217]: DEBUG nova.virt.hardware [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1353.615378] env[68217]: DEBUG nova.virt.hardware [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1353.615521] env[68217]: DEBUG nova.virt.hardware [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1353.615723] env[68217]: DEBUG nova.virt.hardware [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1353.615879] env[68217]: DEBUG nova.virt.hardware [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1353.616053] env[68217]: DEBUG nova.virt.hardware [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1353.616217] env[68217]: DEBUG nova.virt.hardware [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1353.616390] env[68217]: DEBUG 
nova.virt.hardware [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1353.617237] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a67a2f6-e141-465f-8c09-1c4ba027dc5d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.624917] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b338f64-735a-400c-b8a0-8b9c51972656 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.637542] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Instance VIF info [] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1353.642928] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1353.643146] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1353.643333] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ebb66e24-e096-4402-bad5-5239cb81f47c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1353.659950] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1353.659950] env[68217]: value = "task-2962330" [ 1353.659950] env[68217]: _type = "Task" [ 1353.659950] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1353.666731] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962330, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1353.699742] env[68217]: DEBUG nova.scheduler.client.report [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1354.169474] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962330, 'name': CreateVM_Task, 'duration_secs': 0.240581} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.169879] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1354.170122] env[68217]: DEBUG oslo_concurrency.lockutils [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.170291] env[68217]: DEBUG oslo_concurrency.lockutils [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1354.170602] env[68217]: DEBUG oslo_concurrency.lockutils [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1354.170854] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ed724403-4c00-4349-8e68-47ddfeefbc6e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.175032] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1354.175032] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52694f40-3e28-e973-8cbc-9d96434b8cb7" [ 1354.175032] env[68217]: _type = "Task" [ 1354.175032] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.181983] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52694f40-3e28-e973-8cbc-9d96434b8cb7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.203907] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.136s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1354.204100] env[68217]: INFO nova.compute.manager [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Migrating [ 1354.686069] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52694f40-3e28-e973-8cbc-9d96434b8cb7, 'name': SearchDatastore_Task, 'duration_secs': 0.009328} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1354.686376] env[68217]: DEBUG oslo_concurrency.lockutils [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1354.686607] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1354.686838] env[68217]: DEBUG oslo_concurrency.lockutils [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.686983] env[68217]: DEBUG oslo_concurrency.lockutils [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1354.687175] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 
tempest-ServersListShow296Test-320423607-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1354.687433] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ab5d2fa4-ae1d-4e8e-b2b6-0bc62b546a14 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.696444] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1354.696648] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1354.697284] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-076af2fa-04d1-4d36-abdb-c05e92df5107 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1354.702339] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1354.702339] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]529503f1-7849-ee99-9b39-d6f9406184a3" [ 1354.702339] env[68217]: _type = "Task" [ 1354.702339] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1354.709587] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529503f1-7849-ee99-9b39-d6f9406184a3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1354.718143] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1354.718303] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1354.718471] env[68217]: DEBUG nova.network.neutron [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1355.069847] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1355.070088] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1355.070245] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1355.070404] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1355.212586] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]529503f1-7849-ee99-9b39-d6f9406184a3, 'name': SearchDatastore_Task, 'duration_secs': 0.011155} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.213333] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b28fcb2e-210f-4726-abc2-76bffe0a4ec8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.218048] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1355.218048] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b92ab3-491d-6257-3734-a6050cfa3ef1" [ 1355.218048] env[68217]: _type = "Task" [ 1355.218048] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.227047] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b92ab3-491d-6257-3734-a6050cfa3ef1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.345024] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1355.348690] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1355.433139] env[68217]: DEBUG nova.network.neutron [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updating instance_info_cache with network_info: [{"id": "3f237628-945e-4c8d-bf15-f59386ccb358", "address": "fa:16:3e:26:d7:79", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f237628-94", "ovs_interfaceid": "3f237628-945e-4c8d-bf15-f59386ccb358", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 1355.728169] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52b92ab3-491d-6257-3734-a6050cfa3ef1, 'name': SearchDatastore_Task, 'duration_secs': 0.01005} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1355.728417] env[68217]: DEBUG oslo_concurrency.lockutils [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1355.728671] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52/d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1355.728921] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-50bc3500-a5ef-4f73-a3ca-c44bb564d5ea {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1355.735132] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1355.735132] env[68217]: value = "task-2962331" [ 1355.735132] env[68217]: _type = "Task" [ 1355.735132] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1355.742311] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962331, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1355.935835] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1356.245009] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962331, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.438476} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.245394] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52/d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1356.245394] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1356.245644] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0e9ef934-40b8-49fb-8c64-73cd687a6779 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.251465] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1356.251465] env[68217]: value = "task-2962332" [ 1356.251465] env[68217]: _type = "Task" [ 1356.251465] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.258276] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962332, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1356.348540] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1356.348690] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68217) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1356.761848] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962332, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066876} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1356.762137] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1356.762864] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83cc0b6a-6edf-4a5c-a0b8-a9494f06e3de {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.781345] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Reconfiguring VM instance instance-0000007f to attach disk [datastore1] d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52/d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1356.781559] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c14b65d8-e2b6-446d-8905-7e773467c975 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1356.800151] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1356.800151] env[68217]: value = "task-2962333" [ 1356.800151] env[68217]: _type = "Task" [ 1356.800151] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1356.807158] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962333, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.310010] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962333, 'name': ReconfigVM_Task, 'duration_secs': 0.249503} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.310328] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Reconfigured VM instance instance-0000007f to attach disk [datastore1] d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52/d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1357.310907] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-91fb8219-4930-4b2c-b879-e2e2cd69160b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.317304] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1357.317304] env[68217]: value = "task-2962334" [ 1357.317304] env[68217]: _type = "Task" [ 1357.317304] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.324816] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962334, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.452980] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75f0bc37-c9cf-4fa2-bbc0-d0884e449b44 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.470265] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updating instance '16244d37-2f3c-4b46-a449-7d0c679bab3f' progress to 0 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1357.827378] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962334, 'name': Rename_Task, 'duration_secs': 0.12494} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1357.827601] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1357.827830] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-54ba1a79-b210-4a2b-bd10-af7f3bb92ebf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.833888] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1357.833888] env[68217]: value = "task-2962335" [ 1357.833888] env[68217]: _type = "Task" [ 1357.833888] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.840933] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962335, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1357.976026] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1357.976381] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-204f5ad5-c02f-40e5-aece-90ee193c562f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1357.983194] env[68217]: DEBUG oslo_vmware.api [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1357.983194] env[68217]: value = "task-2962336" [ 1357.983194] env[68217]: _type = "Task" [ 1357.983194] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1357.992037] env[68217]: DEBUG oslo_vmware.api [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962336, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1358.342913] env[68217]: DEBUG oslo_vmware.api [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962335, 'name': PowerOnVM_Task, 'duration_secs': 0.394937} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.343271] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1358.343374] env[68217]: DEBUG nova.compute.manager [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1358.344124] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad83643-bb43-4459-a14b-661c375eddc8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1358.492677] env[68217]: DEBUG oslo_vmware.api [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962336, 'name': PowerOffVM_Task, 'duration_secs': 0.163828} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1358.492932] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1358.493137] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updating instance '16244d37-2f3c-4b46-a449-7d0c679bab3f' progress to 17 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1358.858043] env[68217]: DEBUG oslo_concurrency.lockutils [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1358.858160] env[68217]: DEBUG oslo_concurrency.lockutils [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1358.858314] env[68217]: DEBUG nova.objects.instance [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68217) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1358.999339] env[68217]: DEBUG 
nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:15Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1358.999654] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1358.999754] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1358.999978] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1359.000167] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1359.000318] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1359.000529] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1359.000690] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1359.000856] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1359.001038] env[68217]: DEBUG nova.virt.hardware 
[None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1359.001224] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1359.006276] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-36266b00-eb3d-49f0-96e7-390b15d3fc7a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1359.021656] env[68217]: DEBUG oslo_vmware.api [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1359.021656] env[68217]: value = "task-2962337" [ 1359.021656] env[68217]: _type = "Task" [ 1359.021656] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1359.029598] env[68217]: DEBUG oslo_vmware.api [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962337, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1359.162928] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Acquiring lock "d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1359.163140] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Lock "d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1359.163372] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Acquiring lock "d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1359.163555] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Lock "d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1359.163727] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Lock "d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1359.165816] env[68217]: INFO nova.compute.manager [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Terminating instance [ 1359.531045] env[68217]: DEBUG oslo_vmware.api [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962337, 'name': ReconfigVM_Task, 'duration_secs': 0.158209} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1359.531410] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updating instance '16244d37-2f3c-4b46-a449-7d0c679bab3f' progress to 33 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1359.669709] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Acquiring lock "refresh_cache-d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1359.669926] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Acquired lock "refresh_cache-d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1359.670123] env[68217]: DEBUG nova.network.neutron [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1359.866653] env[68217]: DEBUG oslo_concurrency.lockutils [None req-560ce7cd-798e-4777-acf4-22e03ece45a6 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.008s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1360.037463] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1360.037702] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1360.037875] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1360.038079] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1360.038226] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1360.038363] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1360.038562] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1360.038716] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1360.038877] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1360.039057] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Possible topologies 
[VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1360.039234] env[68217]: DEBUG nova.virt.hardware [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1360.044489] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Reconfiguring VM instance instance-0000007d to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1360.044772] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-958726a9-9b00-4fba-8901-8d9da26102a6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.063190] env[68217]: DEBUG oslo_vmware.api [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1360.063190] env[68217]: value = "task-2962338" [ 1360.063190] env[68217]: _type = "Task" [ 1360.063190] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.070492] env[68217]: DEBUG oslo_vmware.api [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962338, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.190966] env[68217]: DEBUG nova.network.neutron [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1360.241894] env[68217]: DEBUG nova.network.neutron [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1360.572700] env[68217]: DEBUG oslo_vmware.api [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962338, 'name': ReconfigVM_Task, 'duration_secs': 0.157418} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1360.573085] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Reconfigured VM instance instance-0000007d to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1360.573730] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938b2829-0202-442d-a13b-dc9ff813a8f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.595871] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 16244d37-2f3c-4b46-a449-7d0c679bab3f/16244d37-2f3c-4b46-a449-7d0c679bab3f.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1360.596125] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba312cef-3ee6-425a-a89e-883c063a0d19 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.613609] env[68217]: DEBUG oslo_vmware.api [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1360.613609] env[68217]: value = "task-2962339" [ 1360.613609] env[68217]: _type = "Task" [ 1360.613609] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.621480] env[68217]: DEBUG oslo_vmware.api [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962339, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1360.744631] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Releasing lock "refresh_cache-d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1360.745054] env[68217]: DEBUG nova.compute.manager [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Start destroying the instance on the hypervisor. 
{{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1360.745248] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1360.746208] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-586732d1-cede-4568-8d09-782f0d1b53bb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.753835] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1360.754098] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8f2c3c39-c55f-4363-843b-a2c286822349 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.760431] env[68217]: DEBUG oslo_vmware.api [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1360.760431] env[68217]: value = "task-2962340" [ 1360.760431] env[68217]: _type = "Task" [ 1360.760431] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1360.767661] env[68217]: DEBUG oslo_vmware.api [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962340, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.123385] env[68217]: DEBUG oslo_vmware.api [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962339, 'name': ReconfigVM_Task, 'duration_secs': 0.256911} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.123615] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 16244d37-2f3c-4b46-a449-7d0c679bab3f/16244d37-2f3c-4b46-a449-7d0c679bab3f.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1361.123873] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updating instance '16244d37-2f3c-4b46-a449-7d0c679bab3f' progress to 50 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1361.269768] env[68217]: DEBUG oslo_vmware.api [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962340, 'name': PowerOffVM_Task, 'duration_secs': 0.189279} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.270057] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1361.270234] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1361.270479] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-02a0f71d-70f8-4f01-876f-8e58bc37760b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.293237] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1361.293454] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1361.293639] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Deleting the datastore file [datastore1] d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1361.293871] env[68217]: 
DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0059985e-fb3b-4051-a9c4-c2fa10fba893 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.299111] env[68217]: DEBUG oslo_vmware.api [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for the task: (returnval){ [ 1361.299111] env[68217]: value = "task-2962342" [ 1361.299111] env[68217]: _type = "Task" [ 1361.299111] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1361.306218] env[68217]: DEBUG oslo_vmware.api [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962342, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1361.630081] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dfdf8b6-8c32-408a-94bf-d15656b86c03 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.648530] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13ea6e3a-ffec-4aa6-a183-85ca56cb713e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1361.665266] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updating instance '16244d37-2f3c-4b46-a449-7d0c679bab3f' progress to 67 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1361.808594] env[68217]: DEBUG oslo_vmware.api [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Task: {'id': task-2962342, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.091343} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1361.808836] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1361.809015] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1361.809199] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1361.809362] env[68217]: INFO nova.compute.manager [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Took 1.06 seconds to destroy the instance on the hypervisor. [ 1361.809591] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1361.809771] env[68217]: DEBUG nova.compute.manager [-] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1361.809876] env[68217]: DEBUG nova.network.neutron [-] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1361.825592] env[68217]: DEBUG nova.network.neutron [-] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Instance cache missing network info. {{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1362.200713] env[68217]: DEBUG nova.network.neutron [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Port 3f237628-945e-4c8d-bf15-f59386ccb358 binding to destination host cpu-1 is already ACTIVE {{(pid=68217) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1362.327473] env[68217]: DEBUG nova.network.neutron [-] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1362.829532] env[68217]: INFO nova.compute.manager [-] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Took 1.02 seconds to deallocate network for instance. 
[ 1363.221667] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "16244d37-2f3c-4b46-a449-7d0c679bab3f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1363.221903] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "16244d37-2f3c-4b46-a449-7d0c679bab3f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1363.222090] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "16244d37-2f3c-4b46-a449-7d0c679bab3f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1363.335578] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1363.335905] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1363.336186] env[68217]: DEBUG nova.objects.instance [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Lazy-loading 'resources' on Instance uuid d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1363.344017] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1363.899580] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33817eaa-bc36-4e10-be62-e37f1775ffad {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.907204] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c23e0429-72c1-4fd1-8608-f7ba3bea0155 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.938497] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-da4d98d8-0c1f-4e95-8d2a-4a6832746d58 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.946052] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ae92d66-c5b4-4108-ad0b-b1778c678cf9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1363.958798] env[68217]: DEBUG nova.compute.provider_tree [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1364.263429] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1364.263651] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1364.263834] env[68217]: DEBUG nova.network.neutron [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1364.461654] env[68217]: DEBUG nova.scheduler.client.report [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1364.959619] env[68217]: DEBUG nova.network.neutron [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updating instance_info_cache with network_info: [{"id": "3f237628-945e-4c8d-bf15-f59386ccb358", "address": "fa:16:3e:26:d7:79", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": 
{"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f237628-94", "ovs_interfaceid": "3f237628-945e-4c8d-bf15-f59386ccb358", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1364.965857] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.630s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1364.984080] env[68217]: INFO nova.scheduler.client.report [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Deleted allocations for instance d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52 [ 1365.462861] env[68217]: DEBUG oslo_concurrency.lockutils [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1365.491829] env[68217]: DEBUG oslo_concurrency.lockutils [None req-e9201449-88b3-410a-88f2-d1d49c583511 tempest-ServersListShow296Test-320423607 tempest-ServersListShow296Test-320423607-project-member] Lock "d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.328s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1365.987882] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b2863e7-880c-48ee-a4db-d0b549bbcb39 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.006858] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ccf859-5ef5-4d07-b868-3afddc3910dc {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.013351] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updating instance '16244d37-2f3c-4b46-a449-7d0c679bab3f' progress to 83 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1366.519897] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Powering on the 
VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1366.520271] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-21179b31-b3f0-485d-8c77-4375bfb444ca {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1366.527576] env[68217]: DEBUG oslo_vmware.api [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1366.527576] env[68217]: value = "task-2962343" [ 1366.527576] env[68217]: _type = "Task" [ 1366.527576] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1366.536490] env[68217]: DEBUG oslo_vmware.api [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962343, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1367.037385] env[68217]: DEBUG oslo_vmware.api [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962343, 'name': PowerOnVM_Task, 'duration_secs': 0.368759} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1367.037752] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1367.037901] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-51d54f04-80d3-4689-83f0-7a711d3d6d91 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updating instance '16244d37-2f3c-4b46-a449-7d0c679bab3f' progress to 100 {{(pid=68217) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1369.419432] env[68217]: DEBUG nova.network.neutron [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Port 3f237628-945e-4c8d-bf15-f59386ccb358 binding to destination host cpu-1 is already ACTIVE {{(pid=68217) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3228}} [ 1369.419741] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1369.419846] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1369.420013] env[68217]: DEBUG 
nova.network.neutron [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1370.124769] env[68217]: DEBUG nova.network.neutron [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updating instance_info_cache with network_info: [{"id": "3f237628-945e-4c8d-bf15-f59386ccb358", "address": "fa:16:3e:26:d7:79", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f237628-94", "ovs_interfaceid": "3f237628-945e-4c8d-bf15-f59386ccb358", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1370.627658] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1371.131028] env[68217]: DEBUG nova.compute.manager [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Hypervisor driver does not support instance shared storage check, assuming it's not on shared storage {{(pid=68217) _is_instance_storage_shared /opt/stack/nova/nova/compute/manager.py:901}} [ 1371.131326] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1371.131520] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: waited 0.000s {{(pid=68217) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1371.634922] env[68217]: DEBUG nova.objects.instance [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lazy-loading 'migration_context' on Instance uuid 16244d37-2f3c-4b46-a449-7d0c679bab3f {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1372.186439] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ceabe8db-6de4-4684-8c43-6891713e1f5a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.194934] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd294a36-8686-47ae-bb35-a2c54034e9b3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.223698] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00044098-f0b2-4b0d-b71f-84f02a89d4b0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.230403] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aeef769-e3ba-448d-9f35-d63f6a7b6806 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1372.242717] env[68217]: DEBUG nova.compute.provider_tree [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1372.746406] env[68217]: DEBUG nova.scheduler.client.report [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1373.757173] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_dest" :: held 2.625s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1375.290847] env[68217]: INFO nova.compute.manager [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Swapping old allocation on dict_keys(['42aedcce-ee61-45e1-bf10-c06056d1f548']) held by migration c330fdf2-7619-4878-a5f7-bd6b51793328 for instance [ 1375.311268] env[68217]: DEBUG 
nova.scheduler.client.report [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Overwriting current allocation {'allocations': {'42aedcce-ee61-45e1-bf10-c06056d1f548': {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}, 'generation': 185}}, 'project_id': '46f4c8c2f4764bd1b995396126b6aaf3', 'user_id': 'c867c8ebcaeb49ec91f751e2be5349b1', 'consumer_generation': 1} on consumer 16244d37-2f3c-4b46-a449-7d0c679bab3f {{(pid=68217) move_allocations /opt/stack/nova/nova/scheduler/client/report.py:2036}} [ 1375.386093] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1375.386288] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1375.386457] env[68217]: DEBUG nova.network.neutron [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1376.088584] env[68217]: DEBUG nova.network.neutron [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updating instance_info_cache with network_info: [{"id": "3f237628-945e-4c8d-bf15-f59386ccb358", "address": "fa:16:3e:26:d7:79", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f237628-94", "ovs_interfaceid": "3f237628-945e-4c8d-bf15-f59386ccb358", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1376.590948] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 
tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "refresh_cache-16244d37-2f3c-4b46-a449-7d0c679bab3f" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1376.591431] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1376.591674] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-a9faf1c9-38cc-4d21-b7df-c7aadf7474c9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1376.599908] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1376.599908] env[68217]: value = "task-2962344" [ 1376.599908] env[68217]: _type = "Task" [ 1376.599908] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1376.607524] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962344, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.109712] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962344, 'name': PowerOffVM_Task, 'duration_secs': 0.21259} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.109965] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1377.110640] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1377.110843] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1377.110993] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1377.111197] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1377.111336] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1377.111477] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1377.111676] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1377.111832] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 
tempest-ServerActionsTestJSON-434688944-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1377.111996] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1377.112172] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1377.112345] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1377.117179] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e29ecbe3-0413-4bd0-8d81-b423d3c1d046 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.131241] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1377.131241] env[68217]: value = "task-2962345" [ 1377.131241] env[68217]: _type = "Task" [ 1377.131241] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.138280] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962345, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1377.640901] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962345, 'name': ReconfigVM_Task, 'duration_secs': 0.129454} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1377.641740] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7ccb70c-9a14-4e2a-aa1b-9d3495593edb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.658945] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1377.659166] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1377.659324] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1377.659501] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1377.659644] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1377.659784] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1377.659978] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1377.660170] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:475}} [ 1377.660341] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1377.660500] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1377.660668] env[68217]: DEBUG nova.virt.hardware [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1377.661414] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a2ad375-7dae-4db3-a809-518005c394f4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1377.666209] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1377.666209] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d8be66-89fd-c21b-ec91-73e0b78ca007" [ 1377.666209] env[68217]: _type = "Task" [ 1377.666209] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1377.673680] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d8be66-89fd-c21b-ec91-73e0b78ca007, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.176850] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52d8be66-89fd-c21b-ec91-73e0b78ca007, 'name': SearchDatastore_Task, 'duration_secs': 0.009689} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.182108] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Reconfiguring VM instance instance-0000007d to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1378.182380] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-33be74b1-95cf-48d3-aa7d-40dd030d6b95 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.199499] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1378.199499] env[68217]: value = "task-2962346" [ 1378.199499] env[68217]: _type = "Task" [ 1378.199499] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.207826] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962346, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1378.709382] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962346, 'name': ReconfigVM_Task, 'duration_secs': 0.153928} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1378.709779] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Reconfigured VM instance instance-0000007d to detach disk 2000 {{(pid=68217) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1378.710458] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba53084-1635-4a62-83d2-65c56529b33b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.731560] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Reconfiguring VM instance instance-0000007d to attach disk [datastore1] 16244d37-2f3c-4b46-a449-7d0c679bab3f/16244d37-2f3c-4b46-a449-7d0c679bab3f.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1378.731819] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-73d5e5c4-c398-4004-902e-f7d6e45ce8c6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1378.749459] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1378.749459] env[68217]: value = "task-2962347" [ 1378.749459] env[68217]: _type = "Task" [ 1378.749459] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1378.759927] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962347, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.258666] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962347, 'name': ReconfigVM_Task} progress is 14%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1379.759130] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962347, 'name': ReconfigVM_Task, 'duration_secs': 0.789534} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1379.759529] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Reconfigured VM instance instance-0000007d to attach disk [datastore1] 16244d37-2f3c-4b46-a449-7d0c679bab3f/16244d37-2f3c-4b46-a449-7d0c679bab3f.vmdk or device None with type thin {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1379.760224] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5de07b-5105-4832-8c85-1aafc46f1e5c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.777510] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba00ffc2-d48f-4361-bcbd-61c22cc77aba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.794528] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f58134cf-084d-4bf5-b78e-3127fd431e9c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.810981] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a77d0c6-7444-422a-95fc-dc2d28419f37 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.816852] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1379.817071] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d9f1db67-33e5-4342-8087-cd1d45022949 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1379.822623] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1379.822623] env[68217]: value = "task-2962348" [ 1379.822623] env[68217]: _type = "Task" [ 1379.822623] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1379.829380] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962348, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1380.332342] env[68217]: DEBUG oslo_vmware.api [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962348, 'name': PowerOnVM_Task, 'duration_secs': 0.347757} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1380.332597] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1381.343122] env[68217]: INFO nova.compute.manager [None req-db005374-1955-4576-9c3f-6530f76ed946 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updating instance to original state: 'active' [ 1383.139621] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "16244d37-2f3c-4b46-a449-7d0c679bab3f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1383.139987] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "16244d37-2f3c-4b46-a449-7d0c679bab3f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1383.140060] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "16244d37-2f3c-4b46-a449-7d0c679bab3f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1383.140245] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "16244d37-2f3c-4b46-a449-7d0c679bab3f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1383.140417] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "16244d37-2f3c-4b46-a449-7d0c679bab3f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1383.142800] env[68217]: INFO nova.compute.manager [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Terminating instance [ 1383.646807] env[68217]: DEBUG nova.compute.manager [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 
16244d37-2f3c-4b46-a449-7d0c679bab3f] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1383.647141] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1383.648261] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b0b280-029c-4db4-b3a9-e80b9bc829c3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.655618] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1383.655845] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6ce205a8-4ec6-4f35-8f7c-3857b083daba {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1383.661401] env[68217]: DEBUG oslo_vmware.api [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1383.661401] env[68217]: value = "task-2962349" [ 1383.661401] env[68217]: _type = "Task" [ 1383.661401] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1383.669455] env[68217]: DEBUG oslo_vmware.api [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962349, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1384.170897] env[68217]: DEBUG oslo_vmware.api [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962349, 'name': PowerOffVM_Task, 'duration_secs': 0.16688} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1384.171309] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1384.171362] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1384.171702] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d6b1794b-4f70-4d78-a9af-c2a64037cad1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.789494] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1384.789748] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1384.789892] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Deleting the datastore file [datastore1] 16244d37-2f3c-4b46-a449-7d0c679bab3f {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1384.790182] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-5c5b6db2-15d2-476f-a5b7-c93dd5de8813 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1384.795964] env[68217]: DEBUG oslo_vmware.api [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1384.795964] env[68217]: value = "task-2962351" [ 1384.795964] env[68217]: _type = "Task" [ 1384.795964] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1384.803587] env[68217]: DEBUG oslo_vmware.api [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962351, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1385.305503] env[68217]: DEBUG oslo_vmware.api [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962351, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.152949} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1385.305900] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1385.305965] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1385.306097] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1385.306276] env[68217]: INFO nova.compute.manager [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Took 1.66 seconds to destroy the instance on the hypervisor. [ 1385.306517] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1385.306708] env[68217]: DEBUG nova.compute.manager [-] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1385.306805] env[68217]: DEBUG nova.network.neutron [-] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1385.685303] env[68217]: DEBUG nova.compute.manager [req-c280b627-3ff2-4e03-a070-99805de617f1 req-29e740b2-844e-4e80-8eb8-0c15e0be87b8 service nova] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Received event network-vif-deleted-3f237628-945e-4c8d-bf15-f59386ccb358 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1385.685303] env[68217]: INFO nova.compute.manager [req-c280b627-3ff2-4e03-a070-99805de617f1 req-29e740b2-844e-4e80-8eb8-0c15e0be87b8 service nova] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Neutron deleted interface 3f237628-945e-4c8d-bf15-f59386ccb358; detaching it from the instance and deleting it from the info cache [ 1385.685303] env[68217]: DEBUG nova.network.neutron [req-c280b627-3ff2-4e03-a070-99805de617f1 req-29e740b2-844e-4e80-8eb8-0c15e0be87b8 service nova] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1386.168136] env[68217]: DEBUG nova.network.neutron [-] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1386.187158] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0b9bbb28-3129-4771-8f97-13ecdc3144eb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.196914] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127f470c-fed2-4e0d-a8f5-e2df0afb10c9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1386.221938] env[68217]: DEBUG nova.compute.manager [req-c280b627-3ff2-4e03-a070-99805de617f1 req-29e740b2-844e-4e80-8eb8-0c15e0be87b8 service nova] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Detach interface failed, port_id=3f237628-945e-4c8d-bf15-f59386ccb358, reason: Instance 16244d37-2f3c-4b46-a449-7d0c679bab3f could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1386.671805] env[68217]: INFO nova.compute.manager [-] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Took 1.36 seconds to deallocate network for instance. 
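[editor's sketch] The records above and below follow the same wait_for_task pattern that recurs throughout this run: a vCenter task is started (DeleteDatastoreFile_Task here, later CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), its "progress is N%" is logged while it runs, and the caller blocks until it "completed successfully". The loop below is a minimal, self-contained illustration of that polling idea only; poll_vcenter_task, TaskInfo, the interval, and the timeout are assumptions made for the sketch and are not the oslo.vmware implementation invoked in these log lines.

import time

# Hypothetical task-info shape, loosely mirroring the vSphere TaskInfo states
# ("queued" / "running" / "success" / "error") that the progress lines reflect.
class TaskInfo:
    def __init__(self, state, progress=0, error=None):
        self.state = state
        self.progress = progress
        self.error = error

def poll_vcenter_task(get_task_info, interval=0.5, timeout=300):
    """Poll a task until it succeeds, fails, or times out (illustrative only).

    get_task_info: callable returning the current TaskInfo for the task.
    """
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError("task failed: %s" % info.error)
        # Corresponds to the repeated "progress is N%" lines while a task runs.
        time.sleep(interval)
    raise TimeoutError("task did not complete within %s seconds" % timeout)

# Usage sketch: a task that reports running twice, then succeeds on the third poll.
if __name__ == "__main__":
    states = iter([TaskInfo("running", 0),
                   TaskInfo("running", 50),
                   TaskInfo("success", 100)])
    print(poll_vcenter_task(lambda: next(states), interval=0).state)

In the real run the poll interval and retry behaviour come from the session configuration established at service start-up; the sketch only captures the poll-until-terminal-state shape visible in the surrounding records.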
[ 1387.178715] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1387.179114] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1387.179381] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1387.207873] env[68217]: INFO nova.scheduler.client.report [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Deleted allocations for instance 16244d37-2f3c-4b46-a449-7d0c679bab3f [ 1387.716018] env[68217]: DEBUG oslo_concurrency.lockutils [None req-573ba2bc-10d8-4f91-8508-a1c3ae592d16 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "16244d37-2f3c-4b46-a449-7d0c679bab3f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.576s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1389.307596] env[68217]: DEBUG oslo_concurrency.lockutils [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "349f502f-63c8-4b23-b007-6c3de035f092" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1389.307883] env[68217]: DEBUG oslo_concurrency.lockutils [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "349f502f-63c8-4b23-b007-6c3de035f092" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1389.810217] env[68217]: DEBUG nova.compute.manager [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1390.330997] env[68217]: DEBUG oslo_concurrency.lockutils [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1390.331295] env[68217]: DEBUG oslo_concurrency.lockutils [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1390.332783] env[68217]: INFO nova.compute.claims [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1392.073610] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09752dd0-5026-4666-93f3-ffcefadfd02e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.080826] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c3f5376-c794-40e0-88db-e25dd889f7d0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.111856] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41887a27-ca37-42df-bffc-a58bcb8dee24 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.118179] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8d5532-d20f-4882-b160-e58548722668 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1392.130397] env[68217]: DEBUG nova.compute.provider_tree [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1392.633205] env[68217]: DEBUG nova.scheduler.client.report [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1393.138654] env[68217]: DEBUG oslo_concurrency.lockutils [None 
req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.807s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1393.139194] env[68217]: DEBUG nova.compute.manager [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1393.644545] env[68217]: DEBUG nova.compute.utils [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1393.646048] env[68217]: DEBUG nova.compute.manager [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Allocating IP information in the background. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1988}} [ 1393.646230] env[68217]: DEBUG nova.network.neutron [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] allocate_for_instance() {{(pid=68217) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1205}} [ 1393.692250] env[68217]: DEBUG nova.policy [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c867c8ebcaeb49ec91f751e2be5349b1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '46f4c8c2f4764bd1b995396126b6aaf3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68217) authorize /opt/stack/nova/nova/policy.py:192}} [ 1393.964267] env[68217]: DEBUG nova.network.neutron [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Successfully created port: bd370065-81fa-4702-80fd-8332cf9785f3 {{(pid=68217) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1394.149602] env[68217]: DEBUG nova.compute.manager [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Start building block device mappings for instance. 
{{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1395.159451] env[68217]: DEBUG nova.compute.manager [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Start spawning the instance on the hypervisor. {{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1395.184937] env[68217]: DEBUG nova.virt.hardware [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1395.185187] env[68217]: DEBUG nova.virt.hardware [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1395.185339] env[68217]: DEBUG nova.virt.hardware [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1395.185537] env[68217]: DEBUG nova.virt.hardware [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1395.185654] env[68217]: DEBUG nova.virt.hardware [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1395.185801] env[68217]: DEBUG nova.virt.hardware [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1395.186010] env[68217]: DEBUG nova.virt.hardware [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1395.186172] env[68217]: 
DEBUG nova.virt.hardware [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1395.186331] env[68217]: DEBUG nova.virt.hardware [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1395.186485] env[68217]: DEBUG nova.virt.hardware [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1395.186653] env[68217]: DEBUG nova.virt.hardware [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1395.187498] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9013f6ea-476e-427b-8359-20d4f6c30ff0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.195397] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a33aab1-ad28-43e3-afe3-27464cca33a4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1395.342435] env[68217]: DEBUG nova.compute.manager [req-d62337d3-1af9-49d7-97d8-1cfb9d1a8c3d req-15526864-cab0-4c3c-89a4-3e3b321a53c0 service nova] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Received event network-vif-plugged-bd370065-81fa-4702-80fd-8332cf9785f3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1395.342746] env[68217]: DEBUG oslo_concurrency.lockutils [req-d62337d3-1af9-49d7-97d8-1cfb9d1a8c3d req-15526864-cab0-4c3c-89a4-3e3b321a53c0 service nova] Acquiring lock "349f502f-63c8-4b23-b007-6c3de035f092-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1395.342998] env[68217]: DEBUG oslo_concurrency.lockutils [req-d62337d3-1af9-49d7-97d8-1cfb9d1a8c3d req-15526864-cab0-4c3c-89a4-3e3b321a53c0 service nova] Lock "349f502f-63c8-4b23-b007-6c3de035f092-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1395.343221] env[68217]: DEBUG oslo_concurrency.lockutils [req-d62337d3-1af9-49d7-97d8-1cfb9d1a8c3d req-15526864-cab0-4c3c-89a4-3e3b321a53c0 service nova] Lock "349f502f-63c8-4b23-b007-6c3de035f092-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1395.343370] env[68217]: DEBUG nova.compute.manager [req-d62337d3-1af9-49d7-97d8-1cfb9d1a8c3d 
req-15526864-cab0-4c3c-89a4-3e3b321a53c0 service nova] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] No waiting events found dispatching network-vif-plugged-bd370065-81fa-4702-80fd-8332cf9785f3 {{(pid=68217) pop_instance_event /opt/stack/nova/nova/compute/manager.py:322}} [ 1395.343536] env[68217]: WARNING nova.compute.manager [req-d62337d3-1af9-49d7-97d8-1cfb9d1a8c3d req-15526864-cab0-4c3c-89a4-3e3b321a53c0 service nova] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Received unexpected event network-vif-plugged-bd370065-81fa-4702-80fd-8332cf9785f3 for instance with vm_state building and task_state spawning. [ 1395.420837] env[68217]: DEBUG nova.network.neutron [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Successfully updated port: bd370065-81fa-4702-80fd-8332cf9785f3 {{(pid=68217) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1395.924099] env[68217]: DEBUG oslo_concurrency.lockutils [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "refresh_cache-349f502f-63c8-4b23-b007-6c3de035f092" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1395.924099] env[68217]: DEBUG oslo_concurrency.lockutils [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "refresh_cache-349f502f-63c8-4b23-b007-6c3de035f092" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1395.924099] env[68217]: DEBUG nova.network.neutron [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1396.453816] env[68217]: DEBUG nova.network.neutron [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1396.566591] env[68217]: DEBUG nova.network.neutron [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Updating instance_info_cache with network_info: [{"id": "bd370065-81fa-4702-80fd-8332cf9785f3", "address": "fa:16:3e:35:fe:b1", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd370065-81", "ovs_interfaceid": "bd370065-81fa-4702-80fd-8332cf9785f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1397.069676] env[68217]: DEBUG oslo_concurrency.lockutils [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "refresh_cache-349f502f-63c8-4b23-b007-6c3de035f092" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1397.070020] env[68217]: DEBUG nova.compute.manager [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Instance network_info: |[{"id": "bd370065-81fa-4702-80fd-8332cf9785f3", "address": "fa:16:3e:35:fe:b1", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd370065-81", "ovs_interfaceid": "bd370065-81fa-4702-80fd-8332cf9785f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68217) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:2003}} [ 1397.070498] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:fe:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'f78b07ea-f425-4622-84f4-706a5d8820a7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bd370065-81fa-4702-80fd-8332cf9785f3', 'vif_model': 'vmxnet3'}] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1397.077791] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1397.077995] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1397.078578] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-095af375-ae7a-4433-9e8b-ebd3fb07cc91 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.097822] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1397.097822] env[68217]: value = "task-2962352" [ 1397.097822] env[68217]: _type = "Task" [ 1397.097822] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.105120] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962352, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1397.369509] env[68217]: DEBUG nova.compute.manager [req-ec950a5d-71f1-4e7e-90c0-640a6b36cbd5 req-ca65ef4b-4bcd-43e9-9e7f-e02ce75db70a service nova] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Received event network-changed-bd370065-81fa-4702-80fd-8332cf9785f3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1397.369767] env[68217]: DEBUG nova.compute.manager [req-ec950a5d-71f1-4e7e-90c0-640a6b36cbd5 req-ca65ef4b-4bcd-43e9-9e7f-e02ce75db70a service nova] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Refreshing instance network info cache due to event network-changed-bd370065-81fa-4702-80fd-8332cf9785f3. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1397.370023] env[68217]: DEBUG oslo_concurrency.lockutils [req-ec950a5d-71f1-4e7e-90c0-640a6b36cbd5 req-ca65ef4b-4bcd-43e9-9e7f-e02ce75db70a service nova] Acquiring lock "refresh_cache-349f502f-63c8-4b23-b007-6c3de035f092" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.370213] env[68217]: DEBUG oslo_concurrency.lockutils [req-ec950a5d-71f1-4e7e-90c0-640a6b36cbd5 req-ca65ef4b-4bcd-43e9-9e7f-e02ce75db70a service nova] Acquired lock "refresh_cache-349f502f-63c8-4b23-b007-6c3de035f092" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1397.370415] env[68217]: DEBUG nova.network.neutron [req-ec950a5d-71f1-4e7e-90c0-640a6b36cbd5 req-ca65ef4b-4bcd-43e9-9e7f-e02ce75db70a service nova] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Refreshing network info cache for port bd370065-81fa-4702-80fd-8332cf9785f3 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1397.607693] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962352, 'name': CreateVM_Task, 'duration_secs': 0.299022} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1397.608072] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1397.608493] env[68217]: DEBUG oslo_concurrency.lockutils [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1397.608671] env[68217]: DEBUG oslo_concurrency.lockutils [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1397.608994] env[68217]: DEBUG oslo_concurrency.lockutils [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1397.609252] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ff3b2b45-7c3a-4e85-a981-5593f511b473 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.613375] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1397.613375] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]521d5931-0901-6f42-9324-0fee6bc921ae" [ 1397.613375] env[68217]: _type = "Task" [ 1397.613375] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.620392] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521d5931-0901-6f42-9324-0fee6bc921ae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.042882] env[68217]: DEBUG nova.network.neutron [req-ec950a5d-71f1-4e7e-90c0-640a6b36cbd5 req-ca65ef4b-4bcd-43e9-9e7f-e02ce75db70a service nova] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Updated VIF entry in instance network info cache for port bd370065-81fa-4702-80fd-8332cf9785f3. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1398.043234] env[68217]: DEBUG nova.network.neutron [req-ec950a5d-71f1-4e7e-90c0-640a6b36cbd5 req-ca65ef4b-4bcd-43e9-9e7f-e02ce75db70a service nova] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Updating instance_info_cache with network_info: [{"id": "bd370065-81fa-4702-80fd-8332cf9785f3", "address": "fa:16:3e:35:fe:b1", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd370065-81", "ovs_interfaceid": "bd370065-81fa-4702-80fd-8332cf9785f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1398.123539] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]521d5931-0901-6f42-9324-0fee6bc921ae, 'name': SearchDatastore_Task, 'duration_secs': 0.009669} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.123817] env[68217]: DEBUG oslo_concurrency.lockutils [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1398.124061] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1398.124294] env[68217]: DEBUG oslo_concurrency.lockutils [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1398.124441] env[68217]: DEBUG oslo_concurrency.lockutils [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1398.124618] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1398.124870] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-373e90be-0227-4996-a577-45da7c8eb3e4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.132756] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1398.132927] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1398.133609] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e63bb3ab-13f9-4c96-95fb-881d6f9bd151 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.138098] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1398.138098] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52617e67-aa39-7054-1220-d30164d14f43" [ 1398.138098] env[68217]: _type = "Task" [ 1398.138098] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.145074] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52617e67-aa39-7054-1220-d30164d14f43, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.545731] env[68217]: DEBUG oslo_concurrency.lockutils [req-ec950a5d-71f1-4e7e-90c0-640a6b36cbd5 req-ca65ef4b-4bcd-43e9-9e7f-e02ce75db70a service nova] Releasing lock "refresh_cache-349f502f-63c8-4b23-b007-6c3de035f092" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1398.648192] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52617e67-aa39-7054-1220-d30164d14f43, 'name': SearchDatastore_Task, 'duration_secs': 0.008177} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1398.648926] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68627ef7-e669-49ff-927f-3b92616f4887 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.653698] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1398.653698] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fb3905-568a-ba0f-adec-be29bcd3fdb3" [ 1398.653698] env[68217]: _type = "Task" [ 1398.653698] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.660682] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fb3905-568a-ba0f-adec-be29bcd3fdb3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.164379] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52fb3905-568a-ba0f-adec-be29bcd3fdb3, 'name': SearchDatastore_Task, 'duration_secs': 0.009112} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.164624] env[68217]: DEBUG oslo_concurrency.lockutils [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "[datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1399.164819] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 349f502f-63c8-4b23-b007-6c3de035f092/349f502f-63c8-4b23-b007-6c3de035f092.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1399.165084] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b2b2396b-23fb-46ba-96c7-7de5717b3281 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.171172] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1399.171172] env[68217]: value = "task-2962353" [ 1399.171172] env[68217]: _type = "Task" [ 1399.171172] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.178372] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962353, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1399.680722] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962353, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.424177} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.681044] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore1] 349f502f-63c8-4b23-b007-6c3de035f092/349f502f-63c8-4b23-b007-6c3de035f092.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1399.681195] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1399.681437] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-cd4354ae-4967-4495-9ca7-970636782b27 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.687722] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1399.687722] env[68217]: value = "task-2962354" [ 1399.687722] env[68217]: _type = "Task" [ 1399.687722] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1399.694414] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962354, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.197239] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962354, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067355} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.197495] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1400.198282] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a295e2f-8c86-47e0-a7e1-771f6ba8c8eb {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.219521] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Reconfiguring VM instance instance-00000080 to attach disk [datastore1] 349f502f-63c8-4b23-b007-6c3de035f092/349f502f-63c8-4b23-b007-6c3de035f092.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1400.219775] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d54b630e-a027-45fa-b922-5e7dfe1e2b8f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.238909] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1400.238909] env[68217]: value = "task-2962355" [ 1400.238909] env[68217]: _type = "Task" [ 1400.238909] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.245922] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962355, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1400.748661] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962355, 'name': ReconfigVM_Task, 'duration_secs': 0.251465} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1400.749048] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Reconfigured VM instance instance-00000080 to attach disk [datastore1] 349f502f-63c8-4b23-b007-6c3de035f092/349f502f-63c8-4b23-b007-6c3de035f092.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1400.749562] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-75fe5bbb-5334-410b-b753-4f081b23a377 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.756208] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1400.756208] env[68217]: value = "task-2962356" [ 1400.756208] env[68217]: _type = "Task" [ 1400.756208] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1400.763613] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962356, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.266141] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962356, 'name': Rename_Task, 'duration_secs': 0.142529} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.266405] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1401.266642] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-84457f1a-ded7-48e2-9812-76b9027e4e2d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.273312] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1401.273312] env[68217]: value = "task-2962357" [ 1401.273312] env[68217]: _type = "Task" [ 1401.273312] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.281198] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962357, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.783420] env[68217]: DEBUG oslo_vmware.api [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962357, 'name': PowerOnVM_Task, 'duration_secs': 0.402259} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.783756] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1401.783858] env[68217]: INFO nova.compute.manager [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Took 6.62 seconds to spawn the instance on the hypervisor. [ 1401.784044] env[68217]: DEBUG nova.compute.manager [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1401.784827] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f63d7aa4-9e5e-494d-b719-1801d6583509 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1402.301580] env[68217]: INFO nova.compute.manager [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Took 11.99 seconds to build instance. [ 1402.803897] env[68217]: DEBUG oslo_concurrency.lockutils [None req-961d30d9-22d5-4f5b-9076-110a3c236394 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "349f502f-63c8-4b23-b007-6c3de035f092" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.496s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1403.350282] env[68217]: DEBUG nova.compute.manager [req-9a667415-e194-499d-b43b-2f8131ba09a2 req-bf753da9-9365-43e1-aac1-b8af29c4dfb1 service nova] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Received event network-changed-bd370065-81fa-4702-80fd-8332cf9785f3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1403.350389] env[68217]: DEBUG nova.compute.manager [req-9a667415-e194-499d-b43b-2f8131ba09a2 req-bf753da9-9365-43e1-aac1-b8af29c4dfb1 service nova] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Refreshing instance network info cache due to event network-changed-bd370065-81fa-4702-80fd-8332cf9785f3. 
{{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11742}} [ 1403.350658] env[68217]: DEBUG oslo_concurrency.lockutils [req-9a667415-e194-499d-b43b-2f8131ba09a2 req-bf753da9-9365-43e1-aac1-b8af29c4dfb1 service nova] Acquiring lock "refresh_cache-349f502f-63c8-4b23-b007-6c3de035f092" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1403.350778] env[68217]: DEBUG oslo_concurrency.lockutils [req-9a667415-e194-499d-b43b-2f8131ba09a2 req-bf753da9-9365-43e1-aac1-b8af29c4dfb1 service nova] Acquired lock "refresh_cache-349f502f-63c8-4b23-b007-6c3de035f092" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1403.350940] env[68217]: DEBUG nova.network.neutron [req-9a667415-e194-499d-b43b-2f8131ba09a2 req-bf753da9-9365-43e1-aac1-b8af29c4dfb1 service nova] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Refreshing network info cache for port bd370065-81fa-4702-80fd-8332cf9785f3 {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2064}} [ 1404.053387] env[68217]: DEBUG nova.network.neutron [req-9a667415-e194-499d-b43b-2f8131ba09a2 req-bf753da9-9365-43e1-aac1-b8af29c4dfb1 service nova] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Updated VIF entry in instance network info cache for port bd370065-81fa-4702-80fd-8332cf9785f3. {{(pid=68217) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3539}} [ 1404.053737] env[68217]: DEBUG nova.network.neutron [req-9a667415-e194-499d-b43b-2f8131ba09a2 req-bf753da9-9365-43e1-aac1-b8af29c4dfb1 service nova] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Updating instance_info_cache with network_info: [{"id": "bd370065-81fa-4702-80fd-8332cf9785f3", "address": "fa:16:3e:35:fe:b1", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd370065-81", "ovs_interfaceid": "bd370065-81fa-4702-80fd-8332cf9785f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1404.556812] env[68217]: DEBUG oslo_concurrency.lockutils [req-9a667415-e194-499d-b43b-2f8131ba09a2 req-bf753da9-9365-43e1-aac1-b8af29c4dfb1 service nova] Releasing lock "refresh_cache-349f502f-63c8-4b23-b007-6c3de035f092" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1411.349563] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task 
ComputeManager.update_available_resource {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1411.853150] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1411.853433] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1411.853575] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1411.853734] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68217) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1411.854988] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15471c34-db9a-4eb1-afbd-c51c9a8fc76b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.863318] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b84f475c-ed89-44ee-9998-869fa0e94b40 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.877732] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05c99292-3530-4abd-af4b-b3aaf075e3b5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.883673] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df866df-9397-47fd-9fb2-0f7a72e7c595 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1411.911053] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180959MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=68217) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1411.911186] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1411.911380] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" 
:: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1412.936193] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 7056fb29-2a2f-4275-a411-4d5f3fcb421f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1412.936477] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 349f502f-63c8-4b23-b007-6c3de035f092 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1412.936607] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1412.936785] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1412.971280] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c9eab34-a0b7-4e44-9a28-764bbd29b935 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1412.978295] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35e6aa3-7519-4823-a6f1-ba00734a38d0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.008539] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d03bfb4a-a12f-4660-891b-31beb2a32580 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.015019] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7209bdc-967e-456b-875f-b9918a7904c7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1413.027736] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1413.530728] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 
1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1414.035544] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68217) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1414.035884] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.124s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1414.349403] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.349637] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.349816] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1414.349961] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Cleaning up deleted instances with incomplete migration {{(pid=68217) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11872}} [ 1415.848395] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1415.848827] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1415.848827] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1415.848923] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1416.349313] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1416.349471] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] CONF.reclaim_instance_interval 
<= 0, skipping... {{(pid=68217) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1419.349321] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1425.851409] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1425.851769] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Cleaning up deleted instances {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11834}} [ 1426.360207] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] There are 26 instances to clean {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11843}} [ 1426.360386] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: d6fe62ad-6086-4cb3-b5c4-7c938a2bfa52] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1426.864297] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 2c0c2bbf-2e48-4bf1-90b7-0ba6aa5d8ce2] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1427.367741] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 16244d37-2f3c-4b46-a449-7d0c679bab3f] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1427.871838] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 945e4574-75b7-4ff7-8e0e-0fee0c90bef1] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1428.375367] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: e11b2312-4cc2-4b49-bd26-22fd5629669d] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1428.878377] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: b5fdce0e-465a-4cf0-9a15-313bba7a11e9] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1429.381991] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: c9601da9-f07c-4cea-9a40-0b1bca35a17a] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1429.884848] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 22c8918b-c67e-467c-8aea-7dff71a8d266] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1430.387959] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] 
[instance: 5a6f6c64-1c4e-4cce-9185-4cf8e3c40a4c] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1430.891818] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 01c32252-f6e0-4cb0-966e-622872d49199] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1431.395156] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 9844e40f-29ed-48b9-a48f-85fbe10ae2fb] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1431.898362] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 84f7ae5d-abbd-4102-b4a9-5468e0edefc6] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1432.403744] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 1d8973e7-1da3-4c17-9516-007b2356854f] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1432.906069] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 309aa787-aa7d-4508-bf90-499958747c46] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1433.409271] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 759149be-178f-4238-b9c3-c316d060d6be] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1433.912975] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 6789dd7d-d042-4c29-a963-2b4b982d5b43] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1434.416290] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 400058d8-f9ca-41b9-a671-b04b0511d074] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1434.920213] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 89136574-575c-47da-928c-bd7a5dbb3a98] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1435.423250] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 2a6ab572-c1e2-48f5-8805-9d3eccc4fc6b] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1435.926685] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 815d1801-fa07-4466-850d-b1a36d630d46] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1436.430017] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: a4dcc7fb-83e4-4bb9-9c98-9569daee1435] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes 
/opt/stack/nova/nova/compute/manager.py:11847}} [ 1436.933483] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: 6b4dff91-254e-43cc-85cf-7de6214dcafd] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1437.437895] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: d28bcf16-b081-4dc8-a975-2acaed222e15] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1437.941605] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: b88912fd-b9e9-4ca0-aba5-9f1b2b2631e1] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1438.347180] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66c7cb3c-8fed-45fa-b19b-c5c2ef8fb958 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "349f502f-63c8-4b23-b007-6c3de035f092" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1438.347439] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66c7cb3c-8fed-45fa-b19b-c5c2ef8fb958 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "349f502f-63c8-4b23-b007-6c3de035f092" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1438.347622] env[68217]: DEBUG nova.compute.manager [None req-66c7cb3c-8fed-45fa-b19b-c5c2ef8fb958 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1438.348523] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a9624f4-5ffc-4658-a756-ebb70ed39ac7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1438.354996] env[68217]: DEBUG nova.compute.manager [None req-66c7cb3c-8fed-45fa-b19b-c5c2ef8fb958 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=68217) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3404}} [ 1438.355557] env[68217]: DEBUG nova.objects.instance [None req-66c7cb3c-8fed-45fa-b19b-c5c2ef8fb958 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lazy-loading 'flavor' on Instance uuid 349f502f-63c8-4b23-b007-6c3de035f092 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1438.444900] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: e8ed78ff-94dd-42d3-8a4d-8e58dc788e55] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1438.947634] env[68217]: DEBUG nova.compute.manager [None 
req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] [instance: a86015ea-fa6b-4cf8-9d79-273ffa02ec23] Instance has had 0 of 5 cleanup attempts {{(pid=68217) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11847}} [ 1439.364530] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-66c7cb3c-8fed-45fa-b19b-c5c2ef8fb958 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1439.364831] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6519327b-d849-412a-b20e-a54dae5f8de0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1439.372077] env[68217]: DEBUG oslo_vmware.api [None req-66c7cb3c-8fed-45fa-b19b-c5c2ef8fb958 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1439.372077] env[68217]: value = "task-2962358" [ 1439.372077] env[68217]: _type = "Task" [ 1439.372077] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1439.381115] env[68217]: DEBUG oslo_vmware.api [None req-66c7cb3c-8fed-45fa-b19b-c5c2ef8fb958 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962358, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1439.881053] env[68217]: DEBUG oslo_vmware.api [None req-66c7cb3c-8fed-45fa-b19b-c5c2ef8fb958 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962358, 'name': PowerOffVM_Task, 'duration_secs': 0.164671} completed successfully. 
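
The PowerOffVM_Task entries above show the usual submit-then-poll pattern: the power-off call returns a task handle immediately, and the caller waits on it, logging progress until the task reports success and a duration. Below is a minimal, self-contained sketch of that polling loop; it is not the oslo.vmware wait_for_task implementation, and the FakeTask class and poll_task helper are hypothetical illustrations only.

    import time

    class FakeTask:
        """Stand-in for a vCenter task handle (hypothetical, for illustration)."""
        def __init__(self, name, duration_secs):
            self.name = name
            self._deadline = time.monotonic() + duration_secs

        def poll(self):
            """Return (state, progress_pct) the way a real task API might."""
            if time.monotonic() >= self._deadline:
                return "success", 100
            return "running", 50  # fake mid-way progress

    def poll_task(task, interval=0.5, timeout=60.0):
        """Poll a task until it finishes, mirroring the progress lines above."""
        start = time.monotonic()
        while True:
            state, progress = task.poll()
            print(f"Task {task.name}: state={state} progress={progress}%")
            if state == "success":
                return time.monotonic() - start      # duration_secs, as logged
            if state == "error":
                raise RuntimeError(f"task {task.name} failed")
            if time.monotonic() - start > timeout:
                raise TimeoutError(f"task {task.name} did not finish in {timeout}s")
            time.sleep(interval)

    if __name__ == "__main__":
        print(poll_task(FakeTask("PowerOffVM_Task", duration_secs=1.0)))
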
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1439.881332] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-66c7cb3c-8fed-45fa-b19b-c5c2ef8fb958 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1439.881545] env[68217]: DEBUG nova.compute.manager [None req-66c7cb3c-8fed-45fa-b19b-c5c2ef8fb958 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1439.882310] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5ebe314-a161-442a-a626-24ab341400d1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1440.393878] env[68217]: DEBUG oslo_concurrency.lockutils [None req-66c7cb3c-8fed-45fa-b19b-c5c2ef8fb958 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "349f502f-63c8-4b23-b007-6c3de035f092" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 2.046s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1440.701271] env[68217]: DEBUG nova.objects.instance [None req-d3fdc48e-a500-4c6f-ad53-30c8e7debab5 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lazy-loading 'flavor' on Instance uuid 349f502f-63c8-4b23-b007-6c3de035f092 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1441.207091] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d3fdc48e-a500-4c6f-ad53-30c8e7debab5 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "refresh_cache-349f502f-63c8-4b23-b007-6c3de035f092" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1441.207293] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d3fdc48e-a500-4c6f-ad53-30c8e7debab5 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "refresh_cache-349f502f-63c8-4b23-b007-6c3de035f092" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1441.207448] env[68217]: DEBUG nova.network.neutron [None req-d3fdc48e-a500-4c6f-ad53-30c8e7debab5 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1441.207626] env[68217]: DEBUG nova.objects.instance [None req-d3fdc48e-a500-4c6f-ad53-30c8e7debab5 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lazy-loading 'info_cache' on Instance uuid 349f502f-63c8-4b23-b007-6c3de035f092 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1441.711089] env[68217]: DEBUG nova.objects.base [None req-d3fdc48e-a500-4c6f-ad53-30c8e7debab5 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Object 
Instance<349f502f-63c8-4b23-b007-6c3de035f092> lazy-loaded attributes: flavor,info_cache {{(pid=68217) wrapper /opt/stack/nova/nova/objects/base.py:136}} [ 1442.407035] env[68217]: DEBUG nova.network.neutron [None req-d3fdc48e-a500-4c6f-ad53-30c8e7debab5 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Updating instance_info_cache with network_info: [{"id": "bd370065-81fa-4702-80fd-8332cf9785f3", "address": "fa:16:3e:35:fe:b1", "network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd370065-81", "ovs_interfaceid": "bd370065-81fa-4702-80fd-8332cf9785f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1442.910202] env[68217]: DEBUG oslo_concurrency.lockutils [None req-d3fdc48e-a500-4c6f-ad53-30c8e7debab5 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "refresh_cache-349f502f-63c8-4b23-b007-6c3de035f092" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1443.916669] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3fdc48e-a500-4c6f-ad53-30c8e7debab5 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1443.917150] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-55e45750-c390-4e89-b8a1-a4292ec72c57 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1443.924573] env[68217]: DEBUG oslo_vmware.api [None req-d3fdc48e-a500-4c6f-ad53-30c8e7debab5 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1443.924573] env[68217]: value = "task-2962359" [ 1443.924573] env[68217]: _type = "Task" [ 1443.924573] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1443.932560] env[68217]: DEBUG oslo_vmware.api [None req-d3fdc48e-a500-4c6f-ad53-30c8e7debab5 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962359, 'name': PowerOnVM_Task} progress is 0%. 
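
Each instance_info_cache update in this excerpt repeats the same large port dictionary. The sketch below pulls out the handful of fields the log makes visible (port id, MAC, fixed and floating IPs, device name); it is a toy extractor over that JSON shape, not Nova's NetworkInfo model.

    def summarize_vif(vif):
        """Reduce one cached VIF dict (shape as in the log above) to key fields."""
        fixed, floating = [], []
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                fixed.append(ip["address"])
                floating.extend(f["address"] for f in ip.get("floating_ips", []))
        return {
            "port_id": vif["id"],
            "mac": vif["address"],
            "devname": vif.get("devname"),
            "fixed_ips": fixed,
            "floating_ips": floating,
            "active": vif.get("active", False),
        }

    # Example with the values visible in the cache-update entries above.
    example = {
        "id": "bd370065-81fa-4702-80fd-8332cf9785f3",
        "address": "fa:16:3e:35:fe:b1",
        "devname": "tapbd370065-81",
        "active": True,
        "network": {"subnets": [{"ips": [{
            "address": "192.168.128.6",
            "floating_ips": [{"address": "10.180.180.181"}],
        }]}]},
    }
    print(summarize_vif(example))
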
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1444.433889] env[68217]: DEBUG oslo_vmware.api [None req-d3fdc48e-a500-4c6f-ad53-30c8e7debab5 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962359, 'name': PowerOnVM_Task, 'duration_secs': 0.367635} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1444.434265] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-d3fdc48e-a500-4c6f-ad53-30c8e7debab5 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1444.434479] env[68217]: DEBUG nova.compute.manager [None req-d3fdc48e-a500-4c6f-ad53-30c8e7debab5 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1444.435248] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f0b4463-cbc2-4d1f-b6e3-ff785f80fa53 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.395944] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfcb57f9-629b-4e0d-bd19-dd52a2495e21 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.402667] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7c6795-5960-4542-b7b4-d65cbcda905f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Suspending the VM {{(pid=68217) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 1445.402889] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-c1a7e9b2-728f-4dcc-9d4b-dbbb8e82acc8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.409270] env[68217]: DEBUG oslo_vmware.api [None req-1b7c6795-5960-4542-b7b4-d65cbcda905f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1445.409270] env[68217]: value = "task-2962360" [ 1445.409270] env[68217]: _type = "Task" [ 1445.409270] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.416551] env[68217]: DEBUG oslo_vmware.api [None req-1b7c6795-5960-4542-b7b4-d65cbcda905f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962360, 'name': SuspendVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1445.919605] env[68217]: DEBUG oslo_vmware.api [None req-1b7c6795-5960-4542-b7b4-d65cbcda905f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962360, 'name': SuspendVM_Task} progress is 75%. 
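
This stretch of the log walks a single instance through stop, start, suspend and, just below, resume, with a power-state check after every step. A minimal transition table for only the states and actions that appear here; the state names mirror the vm_state values in the log, but the helper itself is an illustration, not Nova's state machine.

    # Allowed transitions for the lifecycle operations seen in this log excerpt.
    TRANSITIONS = {
        ("active", "stop"): "stopped",
        ("stopped", "start"): "active",
        ("active", "suspend"): "suspended",
        ("suspended", "resume"): "active",
        ("active", "delete"): "deleted",
        ("stopped", "delete"): "deleted",
        ("suspended", "delete"): "deleted",
    }

    def apply(state, action):
        """Return the next vm_state, or raise if the action is not valid."""
        try:
            return TRANSITIONS[(state, action)]
        except KeyError:
            raise ValueError(f"cannot {action} an instance in state {state!r}")

    state = "active"
    for action in ("stop", "start", "suspend", "resume", "delete"):
        state = apply(state, action)
        print(action, "->", state)
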
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.423151] env[68217]: DEBUG oslo_vmware.api [None req-1b7c6795-5960-4542-b7b4-d65cbcda905f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962360, 'name': SuspendVM_Task, 'duration_secs': 0.636839} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.423550] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7c6795-5960-4542-b7b4-d65cbcda905f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Suspended the VM {{(pid=68217) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 1446.423779] env[68217]: DEBUG nova.compute.manager [None req-1b7c6795-5960-4542-b7b4-d65cbcda905f tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1446.424916] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb477fb2-eaf7-42ce-a52b-1bcc6534d351 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.730023] env[68217]: INFO nova.compute.manager [None req-af33a010-d8a6-4161-a09e-c4a9612cb589 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Resuming [ 1447.730636] env[68217]: DEBUG nova.objects.instance [None req-af33a010-d8a6-4161-a09e-c4a9612cb589 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lazy-loading 'flavor' on Instance uuid 349f502f-63c8-4b23-b007-6c3de035f092 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1449.241898] env[68217]: DEBUG oslo_concurrency.lockutils [None req-af33a010-d8a6-4161-a09e-c4a9612cb589 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "refresh_cache-349f502f-63c8-4b23-b007-6c3de035f092" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1449.242335] env[68217]: DEBUG oslo_concurrency.lockutils [None req-af33a010-d8a6-4161-a09e-c4a9612cb589 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquired lock "refresh_cache-349f502f-63c8-4b23-b007-6c3de035f092" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1449.243723] env[68217]: DEBUG nova.network.neutron [None req-af33a010-d8a6-4161-a09e-c4a9612cb589 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1449.929529] env[68217]: DEBUG nova.network.neutron [None req-af33a010-d8a6-4161-a09e-c4a9612cb589 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Updating instance_info_cache with network_info: [{"id": "bd370065-81fa-4702-80fd-8332cf9785f3", "address": "fa:16:3e:35:fe:b1", 
"network": {"id": "a2cf58c0-1aee-4bec-912e-4dfdb7bc9b32", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-49822402-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.181", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "46f4c8c2f4764bd1b995396126b6aaf3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "f78b07ea-f425-4622-84f4-706a5d8820a7", "external-id": "nsx-vlan-transportzone-126", "segmentation_id": 126, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbd370065-81", "ovs_interfaceid": "bd370065-81fa-4702-80fd-8332cf9785f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1450.432319] env[68217]: DEBUG oslo_concurrency.lockutils [None req-af33a010-d8a6-4161-a09e-c4a9612cb589 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Releasing lock "refresh_cache-349f502f-63c8-4b23-b007-6c3de035f092" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1450.433701] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffa47a27-ee6e-45ef-909e-df878f3d5efd {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.440211] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-af33a010-d8a6-4161-a09e-c4a9612cb589 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Resuming the VM {{(pid=68217) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 1450.440434] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3b1322db-4b00-48a4-9535-b744a16ba779 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1450.446336] env[68217]: DEBUG oslo_vmware.api [None req-af33a010-d8a6-4161-a09e-c4a9612cb589 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1450.446336] env[68217]: value = "task-2962361" [ 1450.446336] env[68217]: _type = "Task" [ 1450.446336] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1450.453330] env[68217]: DEBUG oslo_vmware.api [None req-af33a010-d8a6-4161-a09e-c4a9612cb589 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962361, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.957440] env[68217]: DEBUG oslo_vmware.api [None req-af33a010-d8a6-4161-a09e-c4a9612cb589 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962361, 'name': PowerOnVM_Task} progress is 66%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1451.457054] env[68217]: DEBUG oslo_vmware.api [None req-af33a010-d8a6-4161-a09e-c4a9612cb589 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962361, 'name': PowerOnVM_Task, 'duration_secs': 0.593194} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1451.457473] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-af33a010-d8a6-4161-a09e-c4a9612cb589 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Resumed the VM {{(pid=68217) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 1451.457473] env[68217]: DEBUG nova.compute.manager [None req-af33a010-d8a6-4161-a09e-c4a9612cb589 tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1451.458230] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e32d3664-239f-4f70-8ed7-37b86b7b3b03 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.453055] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "349f502f-63c8-4b23-b007-6c3de035f092" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1452.453349] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "349f502f-63c8-4b23-b007-6c3de035f092" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1452.453556] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "349f502f-63c8-4b23-b007-6c3de035f092-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1452.453739] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "349f502f-63c8-4b23-b007-6c3de035f092-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1452.453903] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "349f502f-63c8-4b23-b007-6c3de035f092-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1452.456243] env[68217]: INFO nova.compute.manager [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Terminating instance [ 1452.959970] env[68217]: DEBUG nova.compute.manager [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1452.960450] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1452.961212] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97ae87b4-b175-4cc5-81ac-61e699c5d6a1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.969277] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1452.969522] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-424aedea-2234-4238-9b96-83c5dab6d2e4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1452.975594] env[68217]: DEBUG oslo_vmware.api [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1452.975594] env[68217]: value = "task-2962362" [ 1452.975594] env[68217]: _type = "Task" [ 1452.975594] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1452.983638] env[68217]: DEBUG oslo_vmware.api [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962362, 'name': PowerOffVM_Task} progress is 0%. 
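
The terminate flow above takes the per-instance lock and then a separate "-events" lock before the destroy begins, and, as everywhere in this log, each critical section is bracketed by "Acquiring lock … / acquired :: waited Ns / released :: held Ns" lines. A small context manager reproducing that accounting around a plain threading.Lock; a sketch of the pattern, not the oslo.concurrency lockutils code.

    import threading
    import time
    from contextlib import contextmanager

    @contextmanager
    def timed_lock(lock, name, owner):
        """Log wait and hold times around a lock, like the entries above."""
        print(f'Acquiring lock "{name}" by "{owner}"')
        t0 = time.monotonic()
        lock.acquire()
        waited = time.monotonic() - t0
        print(f'Lock "{name}" acquired by "{owner}" :: waited {waited:.3f}s')
        t1 = time.monotonic()
        try:
            yield
        finally:
            lock.release()
            held = time.monotonic() - t1
            print(f'Lock "{name}" "released" by "{owner}" :: held {held:.3f}s')

    compute_resources = threading.Lock()
    with timed_lock(compute_resources, "compute_resources", "example.update_usage"):
        time.sleep(0.01)   # critical section
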
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1453.485490] env[68217]: DEBUG oslo_vmware.api [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962362, 'name': PowerOffVM_Task, 'duration_secs': 0.173875} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1453.485748] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1453.485911] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1453.486182] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-33098a75-d67a-426f-beb5-bd70356f0591 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.730272] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1453.730529] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Deleting contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1453.730656] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Deleting the datastore file [datastore1] 349f502f-63c8-4b23-b007-6c3de035f092 {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1453.730920] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b1d40bb7-84cd-40df-959d-2d56d12969b6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1453.738581] env[68217]: DEBUG oslo_vmware.api [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for the task: (returnval){ [ 1453.738581] env[68217]: value = "task-2962364" [ 1453.738581] env[68217]: _type = "Task" [ 1453.738581] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1453.745749] env[68217]: DEBUG oslo_vmware.api [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962364, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1454.248659] env[68217]: DEBUG oslo_vmware.api [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Task: {'id': task-2962364, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144511} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1454.248984] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1454.249088] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Deleted contents of the VM from datastore datastore1 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1454.249260] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1454.249475] env[68217]: INFO nova.compute.manager [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Took 1.29 seconds to destroy the instance on the hypervisor. [ 1454.249720] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
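
Right above, once the VM is destroyed the manager hands network deallocation to a looping call that keeps calling _deallocate_network_with_retries until it returns. A bare-bones retry helper in the same spirit; the attempt counts and backoff numbers below are made up, and this is not oslo.service's loopingcall.

    import time

    def retry(func, attempts=5, delay=1.0, backoff=2.0):
        """Call func until it succeeds or the attempt budget is exhausted."""
        for attempt in range(1, attempts + 1):
            try:
                return func()
            except Exception as exc:        # real code would catch specific errors
                if attempt == attempts:
                    raise
                print(f"attempt {attempt} failed ({exc}); retrying in {delay:.1f}s")
                time.sleep(delay)
                delay *= backoff

    calls = {"n": 0}
    def deallocate_network():
        """Pretend deallocation that fails once, then succeeds."""
        calls["n"] += 1
        if calls["n"] < 2:
            raise RuntimeError("neutron temporarily unavailable")
        return "deallocated"

    print(retry(deallocate_network, attempts=3, delay=0.1))
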
{{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1454.249913] env[68217]: DEBUG nova.compute.manager [-] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1454.250015] env[68217]: DEBUG nova.network.neutron [-] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1454.644519] env[68217]: DEBUG nova.compute.manager [req-3ef37273-1fdd-4012-8c35-2537cadb373b req-557dd59d-a1f5-4c25-ba06-9f043c73aaa6 service nova] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Received event network-vif-deleted-bd370065-81fa-4702-80fd-8332cf9785f3 {{(pid=68217) external_instance_event /opt/stack/nova/nova/compute/manager.py:11737}} [ 1454.644754] env[68217]: INFO nova.compute.manager [req-3ef37273-1fdd-4012-8c35-2537cadb373b req-557dd59d-a1f5-4c25-ba06-9f043c73aaa6 service nova] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Neutron deleted interface bd370065-81fa-4702-80fd-8332cf9785f3; detaching it from the instance and deleting it from the info cache [ 1454.645075] env[68217]: DEBUG nova.network.neutron [req-3ef37273-1fdd-4012-8c35-2537cadb373b req-557dd59d-a1f5-4c25-ba06-9f043c73aaa6 service nova] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1455.129614] env[68217]: DEBUG nova.network.neutron [-] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1455.149284] env[68217]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dfcf5cd5-03cd-42a2-a468-c255e895f089 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.160136] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10993fcb-19b6-4984-8db3-472a0d41e629 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1455.183775] env[68217]: DEBUG nova.compute.manager [req-3ef37273-1fdd-4012-8c35-2537cadb373b req-557dd59d-a1f5-4c25-ba06-9f043c73aaa6 service nova] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Detach interface failed, port_id=bd370065-81fa-4702-80fd-8332cf9785f3, reason: Instance 349f502f-63c8-4b23-b007-6c3de035f092 could not be found. {{(pid=68217) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:11571}} [ 1455.633118] env[68217]: INFO nova.compute.manager [-] [instance: 349f502f-63c8-4b23-b007-6c3de035f092] Took 1.38 seconds to deallocate network for instance. 
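
The entries just above show a race handled gracefully: Neutron reports the port already deleted, Nova still tries to detach it, the port cannot be found, and the failure is logged and ignored because the end state is exactly what teardown wanted. A sketch of that idempotent-delete idea; PortNotFound and the FakeNeutron client are hypothetical stand-ins, not the Neutron client API.

    class PortNotFound(Exception):
        """Stand-in for the 'could not be found' error seen above."""

    class FakeNeutron:
        """Toy port store; a real client would call the Neutron API."""
        def __init__(self, ports):
            self.ports = set(ports)

        def delete_port(self, port_id):
            if port_id not in self.ports:
                raise PortNotFound(port_id)
            self.ports.remove(port_id)

    def detach_port(client, port_id):
        """Treat 'already deleted' as success: teardown must be idempotent."""
        try:
            client.delete_port(port_id)
            print(f"deleted port {port_id}")
        except PortNotFound:
            print(f"Detach interface failed, port_id={port_id}: already gone; ignoring")

    client = FakeNeutron(ports=[])
    detach_port(client, "bd370065-81fa-4702-80fd-8332cf9785f3")  # port already deleted
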
[ 1456.138849] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1456.139318] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1456.139582] env[68217]: DEBUG nova.objects.instance [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lazy-loading 'resources' on Instance uuid 349f502f-63c8-4b23-b007-6c3de035f092 {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1456.745014] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-588306b0-d63b-4a85-bd22-c34157563bc6 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.752329] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-494c94c8-0385-4a83-a489-8ffd5af14310 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.781729] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98790b62-1570-494a-bfb8-c6bffcbdb481 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.788941] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-318620eb-8da8-45ec-bb0c-b63f9124b99b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1456.801208] env[68217]: DEBUG nova.compute.provider_tree [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1457.304403] env[68217]: DEBUG nova.scheduler.client.report [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1457.809912] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 
tempest-ServerActionsTestJSON-434688944-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.671s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1457.830651] env[68217]: INFO nova.scheduler.client.report [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Deleted allocations for instance 349f502f-63c8-4b23-b007-6c3de035f092 [ 1458.338930] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7e2293eb-3324-4e90-ad5b-f091c7bbe3df tempest-ServerActionsTestJSON-434688944 tempest-ServerActionsTestJSON-434688944-project-member] Lock "349f502f-63c8-4b23-b007-6c3de035f092" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.885s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1461.493184] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Acquiring lock "71f3a07c-4469-4d25-8e02-20b7ce83b0dd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1461.493605] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Lock "71f3a07c-4469-4d25-8e02-20b7ce83b0dd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1461.996035] env[68217]: DEBUG nova.compute.manager [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Starting instance... 
{{(pid=68217) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 1462.514871] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1462.515161] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1462.516513] env[68217]: INFO nova.compute.claims [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1463.560177] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76abf640-64f4-404c-877c-721eccc9903f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.567850] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c4ba93-006f-457a-bf8a-a0015ef54453 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.596643] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-495eccd9-ef6d-4bcd-8b9a-5f62353a6c7a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.603219] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fe70d0b-8107-4aa5-bc49-9ed2302d03b1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1463.615678] env[68217]: DEBUG nova.compute.provider_tree [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1464.119033] env[68217]: DEBUG nova.scheduler.client.report [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1464.624870] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7c1f4229-c89a-4438-9275-09f9438dda94 
tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.109s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1464.625354] env[68217]: DEBUG nova.compute.manager [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Start building networks asynchronously for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2836}} [ 1465.130045] env[68217]: DEBUG nova.compute.utils [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Using /dev/sd instead of None {{(pid=68217) get_next_device_name /opt/stack/nova/nova/compute/utils.py:239}} [ 1465.132670] env[68217]: DEBUG nova.compute.manager [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Not allocating networking since 'none' was specified. {{(pid=68217) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1984}} [ 1465.634155] env[68217]: DEBUG nova.compute.manager [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Start building block device mappings for instance. {{(pid=68217) _build_resources /opt/stack/nova/nova/compute/manager.py:2871}} [ 1466.644687] env[68217]: DEBUG nova.compute.manager [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Start spawning the instance on the hypervisor. 
{{(pid=68217) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2645}} [ 1466.728539] env[68217]: DEBUG nova.virt.hardware [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1466.728779] env[68217]: DEBUG nova.virt.hardware [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1466.728935] env[68217]: DEBUG nova.virt.hardware [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} [ 1466.729135] env[68217]: DEBUG nova.virt.hardware [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1466.729278] env[68217]: DEBUG nova.virt.hardware [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1466.729421] env[68217]: DEBUG nova.virt.hardware [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1466.729657] env[68217]: DEBUG nova.virt.hardware [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1466.729831] env[68217]: DEBUG nova.virt.hardware [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1466.729997] env[68217]: DEBUG nova.virt.hardware [None req-7c1f4229-c89a-4438-9275-09f9438dda94 
tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1466.730170] env[68217]: DEBUG nova.virt.hardware [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1466.730338] env[68217]: DEBUG nova.virt.hardware [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1466.731204] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c07e3d2d-ffea-4778-a97c-eaa4c4193aa2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.738804] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aad8637e-10d1-4e6d-bba2-8a8fd0b674e3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.751887] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Instance VIF info [] {{(pid=68217) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1466.757314] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Creating folder: Project (f4a3e2f94d35405080418730a1a44712). Parent ref: group-v594094. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1466.757560] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-34a2c592-be32-4cc7-8218-f9b798953d5d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.828235] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Created folder: Project (f4a3e2f94d35405080418730a1a44712) in parent group-v594094. [ 1466.828412] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Creating folder: Instances. Parent ref: group-v594441. {{(pid=68217) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1466.828639] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4149b3b6-d4b4-4808-bf0b-88f95fb6ec4b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.837161] env[68217]: INFO nova.virt.vmwareapi.vm_util [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Created folder: Instances in parent group-v594441. 
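The nova.virt.hardware entries above walk from flavor/image limits of 0:0:0 down to a single sorted topology of 1:1:1 for the one-vCPU m1.nano flavor. The Python sketch below is a simplified approximation of that enumeration step, assuming only the product rule and the 65536 maxima shown in the log; it is not the nova.virt.hardware implementation, which also weighs flavor/image preferences and NUMA constraints.

    from collections import namedtuple

    VirtCPUTopology = namedtuple('VirtCPUTopology', 'sockets cores threads')

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) splits of `vcpus`, bounded by the maxima.

        Simplified approximation of the enumeration logged above; names and
        bounds mirror the log, the algorithm itself is illustrative only.
        """
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append(VirtCPUTopology(sockets, cores, threads))
        return found

    # For the m1.nano flavor above (vcpus=1) this yields exactly one topology,
    # matching the "Got 1 possible topologies" entry:
    # [VirtCPUTopology(sockets=1, cores=1, threads=1)]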
[ 1466.837375] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1466.837554] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1466.837735] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8235a614-c374-4922-a0a1-008e9cc2e156 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1466.852308] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1466.852308] env[68217]: value = "task-2962367" [ 1466.852308] env[68217]: _type = "Task" [ 1466.852308] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1466.858971] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962367, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.361708] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962367, 'name': CreateVM_Task, 'duration_secs': 0.240616} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.361889] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1467.362319] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1467.362476] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1467.362830] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1467.363088] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47680e86-f1ef-411e-b384-1b8fdc100e7b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.367080] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] 
Waiting for the task: (returnval){ [ 1467.367080] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a28fee-0bed-6868-0d77-20ac6afddc8e" [ 1467.367080] env[68217]: _type = "Task" [ 1467.367080] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.374055] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a28fee-0bed-6868-0d77-20ac6afddc8e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1467.878054] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52a28fee-0bed-6868-0d77-20ac6afddc8e, 'name': SearchDatastore_Task, 'duration_secs': 0.009587} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1467.878437] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1467.878576] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1467.878812] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1467.878961] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1467.879240] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1467.879498] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-46ecfb8e-2d8e-4791-95f2-530c5c936dbf {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.887168] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1467.887352] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1467.888009] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-72237e78-9e08-42ad-a963-c72fbea82cb2 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1467.892991] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1467.892991] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5214087a-d40f-8984-2b4d-2278e946e461" [ 1467.892991] env[68217]: _type = "Task" [ 1467.892991] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1467.899883] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5214087a-d40f-8984-2b4d-2278e946e461, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.403556] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5214087a-d40f-8984-2b4d-2278e946e461, 'name': SearchDatastore_Task, 'duration_secs': 0.007772} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.404333] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd7447a6-4eb5-41e2-aedc-b7f99555580d {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.409356] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1468.409356] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]5276d67e-c812-957b-5c0c-f9c52bbf1c00" [ 1468.409356] env[68217]: _type = "Task" [ 1468.409356] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.417028] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5276d67e-c812-957b-5c0c-f9c52bbf1c00, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1468.919880] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]5276d67e-c812-957b-5c0c-f9c52bbf1c00, 'name': SearchDatastore_Task, 'duration_secs': 0.009837} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1468.920455] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1468.920455] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 71f3a07c-4469-4d25-8e02-20b7ce83b0dd/71f3a07c-4469-4d25-8e02-20b7ce83b0dd.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1468.920645] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4e1ed3a8-fa93-40f0-9064-f76c1be337ec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1468.928239] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1468.928239] env[68217]: value = "task-2962368" [ 1468.928239] env[68217]: _type = "Task" [ 1468.928239] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1468.935960] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962368, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.438772] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962368, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.404573} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.439055] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 71f3a07c-4469-4d25-8e02-20b7ce83b0dd/71f3a07c-4469-4d25-8e02-20b7ce83b0dd.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1469.439276] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1469.439516] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-671786f9-f88f-457e-9af3-c66a3c5e3a11 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.446060] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1469.446060] env[68217]: value = "task-2962369" [ 1469.446060] env[68217]: _type = "Task" [ 1469.446060] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1469.454073] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962369, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1469.955458] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962369, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.054154} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1469.955893] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1469.956508] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7ec329c-ef5f-4207-83a1-906aee4f0add {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.975356] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Reconfiguring VM instance instance-00000081 to attach disk [datastore2] 71f3a07c-4469-4d25-8e02-20b7ce83b0dd/71f3a07c-4469-4d25-8e02-20b7ce83b0dd.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1469.975580] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-76eff926-7f74-487b-ba36-772aebc6fd00 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1469.994119] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1469.994119] env[68217]: value = "task-2962370" [ 1469.994119] env[68217]: _type = "Task" [ 1469.994119] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.001177] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962370, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1470.503910] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962370, 'name': ReconfigVM_Task, 'duration_secs': 0.256468} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1470.504183] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Reconfigured VM instance instance-00000081 to attach disk [datastore2] 71f3a07c-4469-4d25-8e02-20b7ce83b0dd/71f3a07c-4469-4d25-8e02-20b7ce83b0dd.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1470.504762] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1bed923d-71b0-401e-8a35-0899895f2068 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1470.510621] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1470.510621] env[68217]: value = "task-2962371" [ 1470.510621] env[68217]: _type = "Task" [ 1470.510621] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1470.517531] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962371, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.020244] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962371, 'name': Rename_Task, 'duration_secs': 0.129236} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.020597] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1471.020707] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5b6c9cbc-88c5-4015-b2bd-24ca745a58df {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1471.027739] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1471.027739] env[68217]: value = "task-2962372" [ 1471.027739] env[68217]: _type = "Task" [ 1471.027739] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1471.034576] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962372, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1471.536972] env[68217]: DEBUG oslo_vmware.api [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962372, 'name': PowerOnVM_Task, 'duration_secs': 0.39886} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1471.537247] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1471.537445] env[68217]: INFO nova.compute.manager [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Took 4.89 seconds to spawn the instance on the hypervisor. [ 1471.537621] env[68217]: DEBUG nova.compute.manager [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1471.538382] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4951a7be-bd81-44f6-b8c2-5a4b6713dc2c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1472.053663] env[68217]: INFO nova.compute.manager [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Took 9.55 seconds to build instance. 
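Each vSphere operation in the spawn above (CreateVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task) is logged twice: once when it is invoked and once when the task poller reports it complete with a duration_secs value. The sketch below illustrates that poll-until-terminal pattern with a stand-in task object; it is not the oslo_vmware.api implementation, and the FakeTask class and poll interval are hypothetical.

    import logging
    import time

    LOG = logging.getLogger(__name__)

    class FakeTask:
        """Stand-in for a vCenter task handle; real tasks are polled over the vSphere API."""
        def __init__(self, task_id, name, ticks_to_finish=3):
            self.task_id, self.name = task_id, name
            self._ticks = ticks_to_finish
        def poll(self):
            self._ticks -= 1
            if self._ticks > 0:
                return {'state': 'running', 'progress': 0}
            return {'state': 'success', 'progress': 100}

    def wait_for_task(task, interval=0.5):
        """Poll a task until it reaches a terminal state, logging progress like the entries above."""
        start = time.monotonic()
        while True:
            info = task.poll()
            if info['state'] == 'success':
                LOG.debug("Task: {'id': %s, 'name': %s, 'duration_secs': %.6f} completed successfully.",
                          task.task_id, task.name, time.monotonic() - start)
                return info
            if info['state'] == 'error':
                raise RuntimeError('task %s failed' % task.task_id)
            LOG.debug("Task: {'id': %s, 'name': %s} progress is %d%%.",
                      task.task_id, task.name, info['progress'])
            time.sleep(interval)

    # Example, using the same task id format seen in the log:
    # wait_for_task(FakeTask('task-2962367', 'CreateVM_Task'))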
[ 1472.555621] env[68217]: DEBUG oslo_concurrency.lockutils [None req-7c1f4229-c89a-4438-9275-09f9438dda94 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Lock "71f3a07c-4469-4d25-8e02-20b7ce83b0dd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 11.062s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1473.220815] env[68217]: INFO nova.compute.manager [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Rebuilding instance [ 1473.257323] env[68217]: DEBUG nova.compute.manager [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1473.258234] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df712105-3953-46cd-9c75-9e30ae36c19c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.271314] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1474.271897] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-249f453c-8112-46eb-a667-08f0f995a0a7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.278695] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1474.278695] env[68217]: value = "task-2962373" [ 1474.278695] env[68217]: _type = "Task" [ 1474.278695] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.286827] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962373, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1474.788224] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962373, 'name': PowerOffVM_Task, 'duration_secs': 0.110289} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1474.788474] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1474.788700] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1474.789420] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03b50504-7fbc-43df-8e6b-a331ba488d20 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.795457] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1474.795662] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2537c028-bdbb-4bd0-9d12-1a58dafc0553 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.821368] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1474.821610] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1474.821729] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Deleting the datastore file [datastore2] 71f3a07c-4469-4d25-8e02-20b7ce83b0dd {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1474.821989] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c74d0eab-320b-4d4d-a21a-1d512f0e6e4e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1474.828308] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1474.828308] env[68217]: value = "task-2962375" [ 1474.828308] env[68217]: _type = "Task" [ 1474.828308] env[68217]: } to complete. 
{{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1474.835241] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962375, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1475.338947] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962375, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083926} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1475.340085] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1475.340085] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1475.340085] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1476.369293] env[68217]: DEBUG nova.virt.hardware [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-03-12T08:13:13Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-03-12T08:12:54Z,direct_url=,disk_format='vmdk',id=575ba628-84b6-4b0c-98ba-305166627d10,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='afb0cd3e48d6419f875a94a0a1856550',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-03-12T08:12:55Z,virtual_size=,visibility=), allow threads: False {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:567}} [ 1476.369579] env[68217]: DEBUG nova.virt.hardware [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Flavor limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1476.369679] env[68217]: DEBUG nova.virt.hardware [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Image limits 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:356}} 
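The rebuild above tears the VM down in a fixed order before re-spawning it: power off, unregister from vCenter inventory, then delete the instance directory from the datastore. A minimal sketch of that order, assuming hypothetical session.invoke and session.wait_for_task helpers (only the vim method names come from the log):

    def destroy_vm(session, vm_ref, datastore_path):
        """Tear-down order mirrored from the rebuild above.

        `session.invoke` and `session.wait_for_task` are hypothetical stand-ins
        for the driver's vSphere helpers; only the invoked method names
        (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) come from the log.
        """
        power_off = session.invoke('PowerOffVM_Task', vm_ref)
        session.wait_for_task(power_off)

        # UnregisterVM removes the VM from the vCenter inventory but leaves its
        # files behind, which is why the datastore directory is deleted explicitly.
        session.invoke('UnregisterVM', vm_ref)

        delete = session.invoke('DeleteDatastoreFile_Task', datastore_path)  # e.g. "[datastore2] <instance-uuid>"
        session.wait_for_task(delete)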
[ 1476.369870] env[68217]: DEBUG nova.virt.hardware [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Flavor pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1476.370025] env[68217]: DEBUG nova.virt.hardware [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Image pref 0:0:0 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:396}} [ 1476.370177] env[68217]: DEBUG nova.virt.hardware [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68217) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:434}} [ 1476.370377] env[68217]: DEBUG nova.virt.hardware [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:573}} [ 1476.370532] env[68217]: DEBUG nova.virt.hardware [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:475}} [ 1476.370692] env[68217]: DEBUG nova.virt.hardware [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Got 1 possible topologies {{(pid=68217) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:505}} [ 1476.370852] env[68217]: DEBUG nova.virt.hardware [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:579}} [ 1476.371096] env[68217]: DEBUG nova.virt.hardware [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68217) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:581}} [ 1476.371947] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b9f59af-28fd-476e-a0c8-8393709f70c5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.379695] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-310d58e9-41ad-45bf-84ed-cfe8cd598055 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.392876] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Instance VIF info [] {{(pid=68217) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1476.398197] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1476.398412] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Creating VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1476.398600] env[68217]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5eb1cab2-6407-46e7-923d-fd3fe369c620 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.413811] env[68217]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1476.413811] env[68217]: value = "task-2962376" [ 1476.413811] env[68217]: _type = "Task" [ 1476.413811] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.420561] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962376, 'name': CreateVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1476.924308] env[68217]: DEBUG oslo_vmware.api [-] Task: {'id': task-2962376, 'name': CreateVM_Task, 'duration_secs': 0.233853} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1476.924519] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Created VM on the ESX host {{(pid=68217) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1476.924856] env[68217]: DEBUG oslo_concurrency.lockutils [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1476.925023] env[68217]: DEBUG oslo_concurrency.lockutils [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1476.925335] env[68217]: DEBUG oslo_concurrency.lockutils [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:324}} [ 1476.925572] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-be27d76b-61bf-4ea9-8448-97b196060ef8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1476.930195] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 
tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1476.930195] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e9aeeb-6ee9-8724-d93b-ff3b68dadb6b" [ 1476.930195] env[68217]: _type = "Task" [ 1476.930195] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1476.938067] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e9aeeb-6ee9-8724-d93b-ff3b68dadb6b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.440761] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52e9aeeb-6ee9-8724-d93b-ff3b68dadb6b, 'name': SearchDatastore_Task, 'duration_secs': 0.009961} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.441116] env[68217]: DEBUG oslo_concurrency.lockutils [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1477.441286] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Processing image 575ba628-84b6-4b0c-98ba-305166627d10 {{(pid=68217) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1477.441515] env[68217]: DEBUG oslo_concurrency.lockutils [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1477.441656] env[68217]: DEBUG oslo_concurrency.lockutils [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Acquired lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1477.441849] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1477.442125] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d2d165aa-1e8b-41a4-92e7-7934a2e5280a {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.449559] 
env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=68217) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1477.449726] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=68217) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1477.450404] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6619fc25-4b19-438c-bcac-1766ffb4f4b0 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.455461] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1477.455461] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]52123a1b-66b9-724f-20a3-5be04318ada5" [ 1477.455461] env[68217]: _type = "Task" [ 1477.455461] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.462575] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52123a1b-66b9-724f-20a3-5be04318ada5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1477.966075] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]52123a1b-66b9-724f-20a3-5be04318ada5, 'name': SearchDatastore_Task, 'duration_secs': 0.00781} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1477.966075] env[68217]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2f2ae773-bcf8-423f-a167-f46bd4d204e5 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1477.970583] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1477.970583] env[68217]: value = "session[524e8b0f-828c-0303-9685-d9311f1dba7a]524a9a92-ea1a-3c0b-4a70-6b7a13b22f39" [ 1477.970583] env[68217]: _type = "Task" [ 1477.970583] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1477.977639] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524a9a92-ea1a-3c0b-4a70-6b7a13b22f39, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.479933] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': session[524e8b0f-828c-0303-9685-d9311f1dba7a]524a9a92-ea1a-3c0b-4a70-6b7a13b22f39, 'name': SearchDatastore_Task, 'duration_secs': 0.009475} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.480312] env[68217]: DEBUG oslo_concurrency.lockutils [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Releasing lock "[datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1478.480449] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 71f3a07c-4469-4d25-8e02-20b7ce83b0dd/71f3a07c-4469-4d25-8e02-20b7ce83b0dd.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1478.480695] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c8045438-fb20-4c70-a3ab-34424700bdaa {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1478.487030] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1478.487030] env[68217]: value = "task-2962377" [ 1478.487030] env[68217]: _type = "Task" [ 1478.487030] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1478.493942] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962377, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1478.997014] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962377, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.416454} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1478.997272] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/575ba628-84b6-4b0c-98ba-305166627d10/575ba628-84b6-4b0c-98ba-305166627d10.vmdk to [datastore2] 71f3a07c-4469-4d25-8e02-20b7ce83b0dd/71f3a07c-4469-4d25-8e02-20b7ce83b0dd.vmdk {{(pid=68217) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1478.997475] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Extending root virtual disk to 1048576 {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1478.997712] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-425cbef4-1da1-4d19-b277-9a4f5a557e5f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.004057] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1479.004057] env[68217]: value = "task-2962378" [ 1479.004057] env[68217]: _type = "Task" [ 1479.004057] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.010943] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962378, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1479.512876] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962378, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062541} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1479.513273] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Extended root virtual disk {{(pid=68217) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1479.513931] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8a04f88-b6b4-4482-a838-bd3644d63e1c {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.532380] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Reconfiguring VM instance instance-00000081 to attach disk [datastore2] 71f3a07c-4469-4d25-8e02-20b7ce83b0dd/71f3a07c-4469-4d25-8e02-20b7ce83b0dd.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1479.532598] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-29226bc9-61a2-40d0-919d-0f7075ca80d4 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1479.551247] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1479.551247] env[68217]: value = "task-2962379" [ 1479.551247] env[68217]: _type = "Task" [ 1479.551247] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1479.558312] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962379, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.060716] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962379, 'name': ReconfigVM_Task, 'duration_secs': 0.250767} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.060995] env[68217]: DEBUG nova.virt.vmwareapi.volumeops [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Reconfigured VM instance instance-00000081 to attach disk [datastore2] 71f3a07c-4469-4d25-8e02-20b7ce83b0dd/71f3a07c-4469-4d25-8e02-20b7ce83b0dd.vmdk or device None with type sparse {{(pid=68217) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1480.061609] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f6786502-b306-4094-9eeb-29c640794be7 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.067842] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1480.067842] env[68217]: value = "task-2962380" [ 1480.067842] env[68217]: _type = "Task" [ 1480.067842] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.075938] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962380, 'name': Rename_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1480.577569] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962380, 'name': Rename_Task, 'duration_secs': 0.118738} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1480.577918] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Powering on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1480.578077] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4b5c1084-e7d3-4cf0-ad36-4c8231170778 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1480.583779] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1480.583779] env[68217]: value = "task-2962381" [ 1480.583779] env[68217]: _type = "Task" [ 1480.583779] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1480.590810] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962381, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1481.093048] env[68217]: DEBUG oslo_vmware.api [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962381, 'name': PowerOnVM_Task, 'duration_secs': 0.380328} completed successfully. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1481.093340] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Powered on the VM {{(pid=68217) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1481.093513] env[68217]: DEBUG nova.compute.manager [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Checking state {{(pid=68217) _get_power_state /opt/stack/nova/nova/compute/manager.py:1798}} [ 1481.094241] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a04c9c11-bf4b-426a-bc06-c182cf55b33e {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1481.607807] env[68217]: DEBUG oslo_concurrency.lockutils [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1481.608171] env[68217]: DEBUG oslo_concurrency.lockutils [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1481.608243] env[68217]: DEBUG nova.objects.instance [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Trying to apply a migration context that does not seem to be set for this instance {{(pid=68217) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 1481.814564] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Acquiring lock "71f3a07c-4469-4d25-8e02-20b7ce83b0dd" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1481.814829] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Lock "71f3a07c-4469-4d25-8e02-20b7ce83b0dd" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=68217) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1481.815032] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Acquiring lock "71f3a07c-4469-4d25-8e02-20b7ce83b0dd-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1481.815249] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Lock "71f3a07c-4469-4d25-8e02-20b7ce83b0dd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1481.815464] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Lock "71f3a07c-4469-4d25-8e02-20b7ce83b0dd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1481.817446] env[68217]: INFO nova.compute.manager [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Terminating instance [ 1482.321136] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Acquiring lock "refresh_cache-71f3a07c-4469-4d25-8e02-20b7ce83b0dd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1482.321463] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Acquired lock "refresh_cache-71f3a07c-4469-4d25-8e02-20b7ce83b0dd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:316}} [ 1482.321515] env[68217]: DEBUG nova.network.neutron [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Building network info cache for instance {{(pid=68217) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2067}} [ 1482.615483] env[68217]: DEBUG oslo_concurrency.lockutils [None req-982a2d64-076d-4a66-b0ec-124d3c35c468 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.007s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1482.901581] env[68217]: DEBUG nova.network.neutron [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1482.964017] env[68217]: DEBUG nova.network.neutron [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1483.467324] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Releasing lock "refresh_cache-71f3a07c-4469-4d25-8e02-20b7ce83b0dd" {{(pid=68217) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:334}} [ 1483.467867] env[68217]: DEBUG nova.compute.manager [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Start destroying the instance on the hypervisor. {{(pid=68217) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3164}} [ 1483.468149] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Destroying instance {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1483.469241] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a349065-eba6-437c-b074-80f003113ec1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.477880] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Powering off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1483.478177] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f16bce3c-0e6a-48e9-a472-c15c0b41daa9 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.484622] env[68217]: DEBUG oslo_vmware.api [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1483.484622] env[68217]: value = "task-2962382" [ 1483.484622] env[68217]: _type = "Task" [ 1483.484622] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1483.495613] env[68217]: DEBUG oslo_vmware.api [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962382, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1483.996091] env[68217]: DEBUG oslo_vmware.api [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962382, 'name': PowerOffVM_Task, 'duration_secs': 0.181549} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1483.996474] env[68217]: DEBUG nova.virt.vmwareapi.vm_util [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Powered off the VM {{(pid=68217) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1483.996575] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Unregistering the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1483.996743] env[68217]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5605c8aa-22ed-40de-b585-c447a01143c1 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.022160] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Unregistered the VM {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1484.022402] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Deleting contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1484.022571] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Deleting the datastore file [datastore2] 71f3a07c-4469-4d25-8e02-20b7ce83b0dd {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1484.022821] env[68217]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d6d2adde-4aa2-4b72-a502-37cbf3ac4aa8 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.029534] env[68217]: DEBUG oslo_vmware.api [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for the task: (returnval){ [ 1484.029534] env[68217]: value = "task-2962384" [ 1484.029534] env[68217]: _type = "Task" [ 1484.029534] env[68217]: } to complete. {{(pid=68217) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1484.036855] env[68217]: DEBUG oslo_vmware.api [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962384, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1484.539569] env[68217]: DEBUG oslo_vmware.api [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Task: {'id': task-2962384, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.086711} completed successfully. 
{{(pid=68217) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1484.539815] env[68217]: DEBUG nova.virt.vmwareapi.ds_util [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Deleted the datastore file {{(pid=68217) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1484.540068] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Deleted contents of the VM from datastore datastore2 {{(pid=68217) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1484.540262] env[68217]: DEBUG nova.virt.vmwareapi.vmops [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Instance destroyed {{(pid=68217) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1484.540436] env[68217]: INFO nova.compute.manager [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Took 1.07 seconds to destroy the instance on the hypervisor. [ 1484.540672] env[68217]: DEBUG oslo.service.backend.eventlet.loopingcall [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68217) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/backend/eventlet/loopingcall.py:436}} [ 1484.540861] env[68217]: DEBUG nova.compute.manager [-] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Deallocating network for instance {{(pid=68217) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2296}} [ 1484.540956] env[68217]: DEBUG nova.network.neutron [-] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] deallocate_for_instance() {{(pid=68217) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1860}} [ 1484.553848] env[68217]: DEBUG nova.network.neutron [-] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Instance cache missing network info. 
{{(pid=68217) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3380}} [ 1484.949494] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1484.949781] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1484.949901] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1484.950099] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1484.950260] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1484.950406] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1484.950552] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1484.950693] env[68217]: DEBUG nova.compute.manager [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68217) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:11153}} [ 1484.950839] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager.update_available_resource {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1485.055890] env[68217]: DEBUG nova.network.neutron [-] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Updating instance_info_cache with network_info: [] {{(pid=68217) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1485.454995] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1485.454995] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1485.454995] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1485.454995] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68217) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1485.455599] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edef9d24-c8dd-47aa-a7ca-3002a100069b {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.463814] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c834233-dd4a-470a-aaf5-6ca16f655719 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.476927] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6510911-5872-42cd-923d-de3ba922d7ad {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.482881] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1b51f0-93e3-4348-adc3-abb4eed9e384 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1485.511366] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181083MB free_disk=154GB free_vcpus=48 pci_devices=None {{(pid=68217) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1485.511538] env[68217]: DEBUG oslo_concurrency.lockutils [None 
req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1485.511771] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1485.558579] env[68217]: INFO nova.compute.manager [-] [instance: 71f3a07c-4469-4d25-8e02-20b7ce83b0dd] Took 1.02 seconds to deallocate network for instance. [ 1486.064131] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:405}} [ 1486.536430] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 7056fb29-2a2f-4275-a411-4d5f3fcb421f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1486.536430] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Instance 71f3a07c-4469-4d25-8e02-20b7ce83b0dd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=68217) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1486.536594] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1486.536638] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=68217) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1486.571516] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c9c1218-7672-45a0-b324-2ef2b75f135f {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.579365] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e02b4366-a0d7-479b-8b91-bc30ead57316 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.607871] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66d98e57-b150-495b-a202-26d05242dcb3 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.614574] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c84ad5aa-544e-42d7-9948-5694c9b091ec {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1486.627091] env[68217]: DEBUG nova.compute.provider_tree [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1487.129895] env[68217]: DEBUG nova.scheduler.client.report [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1487.635176] env[68217]: DEBUG nova.compute.resource_tracker [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68217) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1487.635423] env[68217]: DEBUG oslo_concurrency.lockutils [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.124s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1487.635666] env[68217]: DEBUG oslo_concurrency.lockutils [None 
req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.572s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:410}} [ 1487.635888] env[68217]: DEBUG nova.objects.instance [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Lazy-loading 'resources' on Instance uuid 71f3a07c-4469-4d25-8e02-20b7ce83b0dd {{(pid=68217) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1488.173721] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8864d437-007d-4801-b0e2-160d94ee1864 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.181264] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f80952-5d27-46a1-91ba-340a1e13a5da {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.210058] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60e5d189-160d-4e97-a017-dcb7e7862770 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.216622] env[68217]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-100feb38-f511-4bfd-9071-2ca5534e9203 {{(pid=68217) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1488.228934] env[68217]: DEBUG nova.compute.provider_tree [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Inventory has not changed in ProviderTree for provider: 42aedcce-ee61-45e1-bf10-c06056d1f548 {{(pid=68217) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1488.732169] env[68217]: DEBUG nova.scheduler.client.report [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Inventory has not changed for provider 42aedcce-ee61-45e1-bf10-c06056d1f548 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 154, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68217) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:958}} [ 1489.238483] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.603s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1489.262110] env[68217]: INFO nova.scheduler.client.report [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Deleted allocations for 
instance 71f3a07c-4469-4d25-8e02-20b7ce83b0dd [ 1489.770287] env[68217]: DEBUG oslo_concurrency.lockutils [None req-db6c3694-fb1d-48dc-a1f0-db4b7db293c3 tempest-ServerShowV257Test-449905431 tempest-ServerShowV257Test-449905431-project-member] Lock "71f3a07c-4469-4d25-8e02-20b7ce83b0dd" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 7.955s {{(pid=68217) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:424}} [ 1490.031756] env[68217]: DEBUG oslo_service.periodic_task [None req-682c4bcd-e1cb-4c1a-88d9-d1e9a086dca2 None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68217) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}}